1 // Copyright 2018 The Amber Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "src/vulkan/device.h"
16
17 #include <algorithm>
18 #include <cstring>
19 #include <iomanip>
20 #include <iostream>
21 #include <memory>
22 #include <set>
23 #include <sstream>
24 #include <string>
25 #include <vector>
26
27 #include "src/make_unique.h"
28
29 namespace amber {
30 namespace vulkan {
31 namespace {
32
33 const char kVariablePointers[] = "VariablePointerFeatures.variablePointers";
34 const char kVariablePointersStorageBuffer[] =
35 "VariablePointerFeatures.variablePointersStorageBuffer";
36 const char kFloat16Int8_Float16[] = "Float16Int8Features.shaderFloat16";
37 const char kFloat16Int8_Int8[] = "Float16Int8Features.shaderInt8";
38 const char k8BitStorage_Storage[] =
39 "Storage8BitFeatures.storageBuffer8BitAccess";
40 const char k8BitStorage_UniformAndStorage[] =
41 "Storage8BitFeatures.uniformAndStorageBuffer8BitAccess";
42 const char k8BitStorage_PushConstant[] =
43 "Storage8BitFeatures.storagePushConstant8";
44 const char k16BitStorage_Storage[] =
45 "Storage16BitFeatures.storageBuffer16BitAccess";
46 const char k16BitStorage_UniformAndStorage[] =
47 "Storage16BitFeatures.uniformAndStorageBuffer16BitAccess";
48 const char k16BitStorage_PushConstant[] =
49 "Storage16BitFeatures.storagePushConstant16";
50 const char k16BitStorage_InputOutput[] =
51 "Storage16BitFeatures.storageInputOutput16";
52
53 const char kSubgroupSizeControl[] = "SubgroupSizeControl.subgroupSizeControl";
54 const char kComputeFullSubgroups[] = "SubgroupSizeControl.computeFullSubgroups";
55
56 const char kSubgroupSupportedOperations[] = "SubgroupSupportedOperations";
57 const char kSubgroupSupportedOperationsBasic[] =
58 "SubgroupSupportedOperations.basic";
59 const char kSubgroupSupportedOperationsVote[] =
60 "SubgroupSupportedOperations.vote";
61 const char kSubgroupSupportedOperationsArithmetic[] =
62 "SubgroupSupportedOperations.arithmetic";
63 const char kSubgroupSupportedOperationsBallot[] =
64 "SubgroupSupportedOperations.ballot";
65 const char kSubgroupSupportedOperationsShuffle[] =
66 "SubgroupSupportedOperations.shuffle";
67 const char kSubgroupSupportedOperationsShuffleRelative[] =
68 "SubgroupSupportedOperations.shuffleRelative";
69 const char kSubgroupSupportedOperationsClustered[] =
70 "SubgroupSupportedOperations.clustered";
71 const char kSubgroupSupportedOperationsQuad[] =
72 "SubgroupSupportedOperations.quad";
73 const char kSubgroupSupportedStages[] = "SubgroupSupportedStages";
74 const char kSubgroupSupportedStagesVertex[] = "SubgroupSupportedStages.vertex";
75 const char kSubgroupSupportedStagesTessellationControl[] =
76 "SubgroupSupportedStages.tessellationControl";
77 const char kSubgroupSupportedStagesTessellationEvaluation[] =
78 "SubgroupSupportedStages.tessellationEvaluation";
79 const char kSubgroupSupportedStagesGeometry[] =
80 "SubgroupSupportedStages.geometry";
81 const char kSubgroupSupportedStagesFragment[] =
82 "SubgroupSupportedStages.fragment";
83 const char kSubgroupSupportedStagesCompute[] =
84 "SubgroupSupportedStages.compute";
85
86 const char kShaderSubgroupExtendedTypes[] =
87 "ShaderSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes";
88
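// Minimal stand-in that mirrors the layout of VkBaseOutStructure (sType +
// pNext), just enough to walk the pNext chain of VkPhysicalDeviceFeatures2KHR.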
89 struct BaseOutStructure {
90 VkStructureType sType;
91 void* pNext;
92 };
93
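// Returns true if every feature named in |required_features| is reported as
// supported in |available_features|. Feature names match the member names of
// VkPhysicalDeviceFeatures (e.g. "geometryShader"); names not recognized here
// are skipped rather than treated as failures.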
94 bool AreAllRequiredFeaturesSupported(
95 const VkPhysicalDeviceFeatures& available_features,
96 const std::vector<std::string>& required_features) {
97 if (required_features.empty())
98 return true;
99
100 for (const auto& feature : required_features) {
101 if (feature == "robustBufferAccess") {
102 if (available_features.robustBufferAccess == VK_FALSE)
103 return false;
104 continue;
105 }
106 if (feature == "fullDrawIndexUint32") {
107 if (available_features.fullDrawIndexUint32 == VK_FALSE)
108 return false;
109 continue;
110 }
111 if (feature == "imageCubeArray") {
112 if (available_features.imageCubeArray == VK_FALSE)
113 return false;
114 continue;
115 }
116 if (feature == "independentBlend") {
117 if (available_features.independentBlend == VK_FALSE)
118 return false;
119 continue;
120 }
121 if (feature == "geometryShader") {
122 if (available_features.geometryShader == VK_FALSE)
123 return false;
124 continue;
125 }
126 if (feature == "tessellationShader") {
127 if (available_features.tessellationShader == VK_FALSE)
128 return false;
129 continue;
130 }
131 if (feature == "sampleRateShading") {
132 if (available_features.sampleRateShading == VK_FALSE)
133 return false;
134 continue;
135 }
136 if (feature == "dualSrcBlend") {
137 if (available_features.dualSrcBlend == VK_FALSE)
138 return false;
139 continue;
140 }
141 if (feature == "logicOp") {
142 if (available_features.logicOp == VK_FALSE)
143 return false;
144 continue;
145 }
146 if (feature == "multiDrawIndirect") {
147 if (available_features.multiDrawIndirect == VK_FALSE)
148 return false;
149 continue;
150 }
151 if (feature == "drawIndirectFirstInstance") {
152 if (available_features.drawIndirectFirstInstance == VK_FALSE)
153 return false;
154 continue;
155 }
156 if (feature == "depthClamp") {
157 if (available_features.depthClamp == VK_FALSE)
158 return false;
159 continue;
160 }
161 if (feature == "depthBiasClamp") {
162 if (available_features.depthBiasClamp == VK_FALSE)
163 return false;
164 continue;
165 }
166 if (feature == "fillModeNonSolid") {
167 if (available_features.fillModeNonSolid == VK_FALSE)
168 return false;
169 continue;
170 }
171 if (feature == "depthBounds") {
172 if (available_features.depthBounds == VK_FALSE)
173 return false;
174 continue;
175 }
176 if (feature == "wideLines") {
177 if (available_features.wideLines == VK_FALSE)
178 return false;
179 continue;
180 }
181 if (feature == "largePoints") {
182 if (available_features.largePoints == VK_FALSE)
183 return false;
184 continue;
185 }
186 if (feature == "alphaToOne") {
187 if (available_features.alphaToOne == VK_FALSE)
188 return false;
189 continue;
190 }
191 if (feature == "multiViewport") {
192 if (available_features.multiViewport == VK_FALSE)
193 return false;
194 continue;
195 }
196 if (feature == "samplerAnisotropy") {
197 if (available_features.samplerAnisotropy == VK_FALSE)
198 return false;
199 continue;
200 }
201 if (feature == "textureCompressionETC2") {
202 if (available_features.textureCompressionETC2 == VK_FALSE)
203 return false;
204 continue;
205 }
206 if (feature == "textureCompressionASTC_LDR") {
207 if (available_features.textureCompressionASTC_LDR == VK_FALSE)
208 return false;
209 continue;
210 }
211 if (feature == "textureCompressionBC") {
212 if (available_features.textureCompressionBC == VK_FALSE)
213 return false;
214 continue;
215 }
216 if (feature == "occlusionQueryPrecise") {
217 if (available_features.occlusionQueryPrecise == VK_FALSE)
218 return false;
219 continue;
220 }
221 if (feature == "pipelineStatisticsQuery") {
222 if (available_features.pipelineStatisticsQuery == VK_FALSE)
223 return false;
224 continue;
225 }
226 if (feature == "vertexPipelineStoresAndAtomics") {
227 if (available_features.vertexPipelineStoresAndAtomics == VK_FALSE)
228 return false;
229 continue;
230 }
231 if (feature == "fragmentStoresAndAtomics") {
232 if (available_features.fragmentStoresAndAtomics == VK_FALSE)
233 return false;
234 continue;
235 }
236 if (feature == "shaderTessellationAndGeometryPointSize") {
237 if (available_features.shaderTessellationAndGeometryPointSize == VK_FALSE)
238 return false;
239 continue;
240 }
241 if (feature == "shaderImageGatherExtended") {
242 if (available_features.shaderImageGatherExtended == VK_FALSE)
243 return false;
244 continue;
245 }
246 if (feature == "shaderStorageImageExtendedFormats") {
247 if (available_features.shaderStorageImageExtendedFormats == VK_FALSE)
248 return false;
249 continue;
250 }
251 if (feature == "shaderStorageImageMultisample") {
252 if (available_features.shaderStorageImageMultisample == VK_FALSE)
253 return false;
254 continue;
255 }
256 if (feature == "shaderStorageImageReadWithoutFormat") {
257 if (available_features.shaderStorageImageReadWithoutFormat == VK_FALSE)
258 return false;
259 continue;
260 }
261 if (feature == "shaderStorageImageWriteWithoutFormat") {
262 if (available_features.shaderStorageImageWriteWithoutFormat == VK_FALSE)
263 return false;
264 continue;
265 }
266 if (feature == "shaderUniformBufferArrayDynamicIndexing") {
267 if (available_features.shaderUniformBufferArrayDynamicIndexing ==
268 VK_FALSE)
269 return false;
270 continue;
271 }
272 if (feature == "shaderSampledImageArrayDynamicIndexing") {
273 if (available_features.shaderSampledImageArrayDynamicIndexing == VK_FALSE)
274 return false;
275 continue;
276 }
277 if (feature == "shaderStorageBufferArrayDynamicIndexing") {
278 if (available_features.shaderStorageBufferArrayDynamicIndexing ==
279 VK_FALSE)
280 return false;
281 continue;
282 }
283 if (feature == "shaderStorageImageArrayDynamicIndexing") {
284 if (available_features.shaderStorageImageArrayDynamicIndexing == VK_FALSE)
285 return false;
286 continue;
287 }
288 if (feature == "shaderClipDistance") {
289 if (available_features.shaderClipDistance == VK_FALSE)
290 return false;
291 continue;
292 }
293 if (feature == "shaderCullDistance") {
294 if (available_features.shaderCullDistance == VK_FALSE)
295 return false;
296 continue;
297 }
298 if (feature == "shaderFloat64") {
299 if (available_features.shaderFloat64 == VK_FALSE)
300 return false;
301 continue;
302 }
303 if (feature == "shaderInt64") {
304 if (available_features.shaderInt64 == VK_FALSE)
305 return false;
306 continue;
307 }
308 if (feature == "shaderInt16") {
309 if (available_features.shaderInt16 == VK_FALSE)
310 return false;
311 continue;
312 }
313 if (feature == "shaderResourceResidency") {
314 if (available_features.shaderResourceResidency == VK_FALSE)
315 return false;
316 continue;
317 }
318 if (feature == "shaderResourceMinLod") {
319 if (available_features.shaderResourceMinLod == VK_FALSE)
320 return false;
321 continue;
322 }
323 if (feature == "sparseBinding") {
324 if (available_features.sparseBinding == VK_FALSE)
325 return false;
326 continue;
327 }
328 if (feature == "sparseResidencyBuffer") {
329 if (available_features.sparseResidencyBuffer == VK_FALSE)
330 return false;
331 continue;
332 }
333 if (feature == "sparseResidencyImage2D") {
334 if (available_features.sparseResidencyImage2D == VK_FALSE)
335 return false;
336 continue;
337 }
338 if (feature == "sparseResidencyImage3D") {
339 if (available_features.sparseResidencyImage3D == VK_FALSE)
340 return false;
341 continue;
342 }
343 if (feature == "sparseResidency2Samples") {
344 if (available_features.sparseResidency2Samples == VK_FALSE)
345 return false;
346 continue;
347 }
348 if (feature == "sparseResidency4Samples") {
349 if (available_features.sparseResidency4Samples == VK_FALSE)
350 return false;
351 continue;
352 }
353 if (feature == "sparseResidency8Samples") {
354 if (available_features.sparseResidency8Samples == VK_FALSE)
355 return false;
356 continue;
357 }
358 if (feature == "sparseResidency16Samples") {
359 if (available_features.sparseResidency16Samples == VK_FALSE)
360 return false;
361 continue;
362 }
363 if (feature == "sparseResidencyAliased") {
364 if (available_features.sparseResidencyAliased == VK_FALSE)
365 return false;
366 continue;
367 }
368 if (feature == "variableMultisampleRate") {
369 if (available_features.variableMultisampleRate == VK_FALSE)
370 return false;
371 continue;
372 }
373 if (feature == "inheritedQueries") {
374 if (available_features.inheritedQueries == VK_FALSE)
375 return false;
376 continue;
377 }
378 }
379
380 return true;
381 }
382
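// Returns true if every extension named in |required_extensions| also appears
// in |available_extensions|.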
383 bool AreAllExtensionsSupported(
384 const std::vector<std::string>& available_extensions,
385 const std::vector<std::string>& required_extensions) {
386 if (required_extensions.empty())
387 return true;
388
389 std::set<std::string> required_extension_set(required_extensions.begin(),
390 required_extensions.end());
391 for (const auto& extension : available_extensions) {
392 required_extension_set.erase(extension);
393 }
394
395 return required_extension_set.empty();
396 }
397
398 } // namespace
399
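// The Device wraps handles created by the caller; it does not create or
// destroy the VkInstance/VkDevice itself (note the defaulted destructor).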
400 Device::Device(VkInstance instance,
401 VkPhysicalDevice physical_device,
402 uint32_t queue_family_index,
403 VkDevice device,
404 VkQueue queue)
405 : instance_(instance),
406 physical_device_(physical_device),
407 device_(device),
408 queue_(queue),
409 queue_family_index_(queue_family_index) {}
410
411 Device::~Device() = default;
412
413 Result Device::LoadVulkanPointers(PFN_vkGetInstanceProcAddr getInstanceProcAddr,
414 Delegate* delegate) {
415 // Note: logging Vulkan calls is done via the delegate rather than a Vulkan
416 // layer because we want such logging even when Amber is built as a native
417 // executable on Android, where Vulkan layers are usable only with APKs.
418 if (delegate && delegate->LogGraphicsCalls())
419 delegate->Log("Loading Vulkan Pointers");
420
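// The vk-wrappers-*.inc files expand to code that resolves each wrapped Vulkan
// entry point into |ptrs_| (presumably via |getInstanceProcAddr|); the 1.1
// wrappers are only loaded when the device reports API 1.1 or later.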
421 #include "vk-wrappers-1-0.inc"
422
423 ptrs_.vkGetPhysicalDeviceProperties(physical_device_,
424 &physical_device_properties_);
425
426 if (SupportsApiVersion(1, 1, 0)) {
427 #include "vk-wrappers-1-1.inc"
428 }
429
430 return {};
431 }
432
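// Returns true if the physical device reports an apiVersion of at least
// major.minor.patch.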
433 bool Device::SupportsApiVersion(uint32_t major,
434 uint32_t minor,
435 uint32_t patch) {
436 #pragma clang diagnostic push
437 #pragma clang diagnostic ignored "-Wold-style-cast"
438 return physical_device_properties_.apiVersion >=
439 VK_MAKE_VERSION(major, minor, patch);
440 #pragma clang diagnostic pop
441 }
442
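// Verifies that the device supports all required features and extensions, and
// caches the properties Amber needs later (memory properties and, when
// requested, subgroup and subgroup-size-control properties). Non-core features
// may be reported either via their individual feature structs or via the
// aggregate VkPhysicalDeviceVulkan1*Features structs chained to
// |available_features2|.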
443 Result Device::Initialize(
444 PFN_vkGetInstanceProcAddr getInstanceProcAddr,
445 Delegate* delegate,
446 const std::vector<std::string>& required_features,
447 const std::vector<std::string>& required_device_extensions,
448 const VkPhysicalDeviceFeatures& available_features,
449 const VkPhysicalDeviceFeatures2KHR& available_features2,
450 const std::vector<std::string>& available_extensions) {
451 Result r = LoadVulkanPointers(getInstanceProcAddr, delegate);
452 if (!r.IsSuccess())
453 return r;
454
455 // Check for the core features. We don't know if available_features or
456 // available_features2 is provided, so check both.
457 if (!AreAllRequiredFeaturesSupported(available_features, required_features) &&
458 !AreAllRequiredFeaturesSupported(available_features2.features,
459 required_features)) {
460 return Result(
461 "Vulkan: Device::Initialize given physical device does not support "
462 "required features");
463 }
464
465 // Search for additional (non-core) feature structs in the pNext chain of
466 // available_features2.
467 VkPhysicalDeviceVariablePointerFeaturesKHR* var_ptrs = nullptr;
468 VkPhysicalDeviceFloat16Int8FeaturesKHR* float16_ptrs = nullptr;
469 VkPhysicalDevice8BitStorageFeaturesKHR* storage8_ptrs = nullptr;
470 VkPhysicalDevice16BitStorageFeaturesKHR* storage16_ptrs = nullptr;
471 VkPhysicalDeviceVulkan11Features* vulkan11_ptrs = nullptr;
472 VkPhysicalDeviceVulkan12Features* vulkan12_ptrs = nullptr;
473 VkPhysicalDeviceVulkan13Features* vulkan13_ptrs = nullptr;
474 VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*
475 subgroup_size_control_features = nullptr;
476 VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*
477 shader_subgroup_extended_types_ptrs = nullptr;
478 void* ptr = available_features2.pNext;
479 while (ptr != nullptr) {
480 BaseOutStructure* s = static_cast<BaseOutStructure*>(ptr);
481 switch (s->sType) {
482 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR:
483 var_ptrs =
484 static_cast<VkPhysicalDeviceVariablePointerFeaturesKHR*>(ptr);
485 break;
486 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR:
487 float16_ptrs =
488 static_cast<VkPhysicalDeviceFloat16Int8FeaturesKHR*>(ptr);
489 break;
490 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
491 storage8_ptrs =
492 static_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>(ptr);
493 break;
494 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR:
495 storage16_ptrs =
496 static_cast<VkPhysicalDevice16BitStorageFeaturesKHR*>(ptr);
497 break;
498 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
499 subgroup_size_control_features =
500 static_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>(ptr);
501 break;
502 // NOLINTNEXTLINE(whitespace/line_length)
503 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES:
504 shader_subgroup_extended_types_ptrs =
505 static_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>(
506 ptr);
507 break;
508 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
509 vulkan11_ptrs = static_cast<VkPhysicalDeviceVulkan11Features*>(ptr);
510 break;
511 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
512 vulkan12_ptrs = static_cast<VkPhysicalDeviceVulkan12Features*>(ptr);
513 break;
514 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
515 vulkan13_ptrs = static_cast<VkPhysicalDeviceVulkan13Features*>(ptr);
516 break;
517 default:
518 break;
519 }
520 ptr = s->pNext;
521 }
522
523 // Compare the available additional (non-core) features against the
524 // requirements.
525 //
526 // Vulkan 1.2 added support for defining non-core physical device features
527 // using VkPhysicalDeviceVulkan11Features and VkPhysicalDeviceVulkan12Features
528 // structures. If |vulkan11_ptrs| and/or |vulkan12_ptrs| are null, we must
529 // check for features using the old approach (by checking across various
530 // feature structs); otherwise, we can check features via the new structs.
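// For example, on a Vulkan 1.2+ device the caller might have chained the
// aggregate structs roughly like this (illustrative sketch only):
//   VkPhysicalDeviceVulkan12Features v12 = {
//       VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES};
//   VkPhysicalDeviceVulkan11Features v11 = {
//       VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, &v12};
//   available_features2.pNext = &v11;
// in which case |vulkan11_ptrs| and |vulkan12_ptrs| are non-null below.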
531 for (const auto& feature : required_features) {
532 // First check that the feature structures needed by the required features
533 // were provided.
534 if ((feature == kVariablePointers ||
535 feature == kVariablePointersStorageBuffer) &&
536 var_ptrs == nullptr && vulkan11_ptrs == nullptr) {
537 return amber::Result(
538 "Variable pointers requested but feature not returned");
539 }
540 if ((feature == k16BitStorage_Storage ||
541 feature == k16BitStorage_UniformAndStorage ||
542 feature == k16BitStorage_PushConstant ||
543 feature == k16BitStorage_InputOutput) &&
544 storage16_ptrs == nullptr && vulkan11_ptrs == nullptr) {
545 return amber::Result(
546 "Shader 16-bit storage requested but feature not returned");
547 }
548 if ((feature == kFloat16Int8_Float16 || feature == kFloat16Int8_Int8) &&
549 float16_ptrs == nullptr && vulkan12_ptrs == nullptr) {
550 return amber::Result(
551 "Shader float16/int8 requested but feature not returned");
552 }
553 if ((feature == k8BitStorage_UniformAndStorage ||
554 feature == k8BitStorage_Storage ||
555 feature == k8BitStorage_PushConstant) &&
556 storage8_ptrs == nullptr && vulkan12_ptrs == nullptr) {
557 return amber::Result(
558 "Shader 8-bit storage requested but feature not returned");
559 }
560 if ((feature == kSubgroupSizeControl || feature == kComputeFullSubgroups) &&
561 subgroup_size_control_features == nullptr && vulkan13_ptrs == nullptr) {
562 return amber::Result("Missing subgroup size control features");
563 }
564 if (feature == kShaderSubgroupExtendedTypes &&
565 shader_subgroup_extended_types_ptrs == nullptr &&
566 vulkan12_ptrs == nullptr) {
567 return amber::Result(
568 "Subgroup extended types requested but feature not returned");
569 }
570
571 // Next check the fields of the feature structures.
572
573 // If the Vulkan 1.1 structure exists, the features are set there.
574 if (vulkan11_ptrs) {
575 if (feature == kVariablePointers &&
576 vulkan11_ptrs->variablePointers != VK_TRUE) {
577 return amber::Result("Missing variable pointers feature");
578 }
579 if (feature == kVariablePointersStorageBuffer &&
580 vulkan11_ptrs->variablePointersStorageBuffer != VK_TRUE) {
581 return amber::Result(
582 "Missing variable pointers storage buffer feature");
583 }
584 if (feature == k16BitStorage_Storage &&
585 vulkan11_ptrs->storageBuffer16BitAccess != VK_TRUE) {
586 return amber::Result("Missing 16-bit storage access");
587 }
588 if (feature == k16BitStorage_UniformAndStorage &&
589 vulkan11_ptrs->uniformAndStorageBuffer16BitAccess != VK_TRUE) {
590 return amber::Result("Missing 16-bit uniform and storage access");
591 }
592 if (feature == k16BitStorage_PushConstant &&
593 vulkan11_ptrs->storagePushConstant16 != VK_TRUE) {
594 return amber::Result("Missing 16-bit push constant access");
595 }
596 if (feature == k16BitStorage_InputOutput &&
597 vulkan11_ptrs->storageInputOutput16 != VK_TRUE) {
598 return amber::Result("Missing 16-bit input/output access");
599 }
600 } else {
601 // The Vulkan 1.1 structure was not found, so use the individual feature
602 // structures instead.
603 if (feature == kVariablePointers &&
604 var_ptrs->variablePointers != VK_TRUE) {
605 return amber::Result("Missing variable pointers feature");
606 }
607 if (feature == kVariablePointersStorageBuffer &&
608 var_ptrs->variablePointersStorageBuffer != VK_TRUE) {
609 return amber::Result(
610 "Missing variable pointers storage buffer feature");
611 }
612 if (feature == k16BitStorage_Storage &&
613 storage16_ptrs->storageBuffer16BitAccess != VK_TRUE) {
614 return amber::Result("Missing 16-bit storage access");
615 }
616 if (feature == k16BitStorage_UniformAndStorage &&
617 storage16_ptrs->uniformAndStorageBuffer16BitAccess != VK_TRUE) {
618 return amber::Result("Missing 16-bit uniform and storage access");
619 }
620 if (feature == k16BitStorage_PushConstant &&
621 storage16_ptrs->storagePushConstant16 != VK_TRUE) {
622 return amber::Result("Missing 16-bit push constant access");
623 }
624 if (feature == k16BitStorage_InputOutput &&
625 storage16_ptrs->storageInputOutput16 != VK_TRUE) {
626 return amber::Result("Missing 16-bit input/output access");
627 }
628 }
629
630 // If the Vulkan 1.2 structure exists, the features are set there.
631 if (vulkan12_ptrs) {
632 if (feature == kFloat16Int8_Float16 &&
633 vulkan12_ptrs->shaderFloat16 != VK_TRUE) {
634 return amber::Result("Missing float16 feature");
635 }
636 if (feature == kFloat16Int8_Int8 &&
637 vulkan12_ptrs->shaderInt8 != VK_TRUE) {
638 return amber::Result("Missing int8 feature");
639 }
640 if (feature == k8BitStorage_Storage &&
641 vulkan12_ptrs->storageBuffer8BitAccess != VK_TRUE) {
642 return amber::Result("Missing 8-bit storage access");
643 }
644 if (feature == k8BitStorage_UniformAndStorage &&
645 vulkan12_ptrs->uniformAndStorageBuffer8BitAccess != VK_TRUE) {
646 return amber::Result("Missing 8-bit uniform and storage access");
647 }
648 if (feature == k8BitStorage_PushConstant &&
649 vulkan12_ptrs->storagePushConstant8 != VK_TRUE) {
650 return amber::Result("Missing 8-bit push constant access");
651 }
652 if (feature == kShaderSubgroupExtendedTypes &&
653 vulkan12_ptrs->shaderSubgroupExtendedTypes != VK_TRUE) {
654 return amber::Result("Missing subgroup extended types");
655 }
656 } else {
657 // The Vulkan 1.2 structure was not found, so use the individual feature
658 // structures instead.
659 if (feature == kFloat16Int8_Float16 &&
660 float16_ptrs->shaderFloat16 != VK_TRUE) {
661 return amber::Result("Missing float16 feature");
662 }
663 if (feature == kFloat16Int8_Int8 && float16_ptrs->shaderInt8 != VK_TRUE) {
664 return amber::Result("Missing int8 feature");
665 }
666 if (feature == k8BitStorage_Storage &&
667 storage8_ptrs->storageBuffer8BitAccess != VK_TRUE) {
668 return amber::Result("Missing 8-bit storage access");
669 }
670 if (feature == k8BitStorage_UniformAndStorage &&
671 storage8_ptrs->uniformAndStorageBuffer8BitAccess != VK_TRUE) {
672 return amber::Result("Missing 8-bit uniform and storage access");
673 }
674 if (feature == k8BitStorage_PushConstant &&
675 storage8_ptrs->storagePushConstant8 != VK_TRUE) {
676 return amber::Result("Missing 8-bit push constant access");
677 }
678 if (feature == kShaderSubgroupExtendedTypes &&
679 shader_subgroup_extended_types_ptrs->shaderSubgroupExtendedTypes !=
680 VK_TRUE) {
681 return amber::Result("Missing subgroup extended types");
682 }
683 }
684
685 // If the Vulkan 1.3 structure exists, the features are set there.
686 if (vulkan13_ptrs) {
687 if (feature == kSubgroupSizeControl &&
688 vulkan13_ptrs->subgroupSizeControl != VK_TRUE) {
689 return amber::Result("Missing subgroup size control feature");
690 }
691 if (feature == kComputeFullSubgroups &&
692 vulkan13_ptrs->computeFullSubgroups != VK_TRUE) {
693 return amber::Result("Missing compute full subgroups feature");
694 }
695 } else {
696 if (feature == kSubgroupSizeControl &&
697 subgroup_size_control_features->subgroupSizeControl != VK_TRUE) {
698 return amber::Result("Missing subgroup size control feature");
699 }
700 if (feature == kComputeFullSubgroups &&
701 subgroup_size_control_features->computeFullSubgroups != VK_TRUE) {
702 return amber::Result("Missing compute full subgroups feature");
703 }
704 }
705 }
706
707 if (!AreAllExtensionsSupported(available_extensions,
708 required_device_extensions)) {
709 return Result(
710 "Vulkan: Device::Initialize given physical device does not support "
711 "required extensions");
712 }
713
714 ptrs_.vkGetPhysicalDeviceMemoryProperties(physical_device_,
715 &physical_memory_properties_);
716
717 subgroup_size_control_properties_ = {};
718 const bool needs_subgroup_size_control =
719 std::find(required_features.begin(), required_features.end(),
720 kSubgroupSizeControl) != required_features.end();
721
722 bool needs_subgroup_supported_operations = false;
723 bool needs_subgroup_supported_stages = false;
724
725 // Search for subgroup supported operations requirements.
726 for (const auto& feature : required_features)
727 if (feature.find(kSubgroupSupportedOperations) != std::string::npos)
728 needs_subgroup_supported_operations = true;
729
730 // Search for subgroup supported stages requirements.
731 for (const auto& feature : required_features)
732 if (feature.find(kSubgroupSupportedStages) != std::string::npos)
733 needs_subgroup_supported_stages = true;
734
735 const bool needs_subgroup_properties =
736 needs_subgroup_supported_operations || needs_subgroup_supported_stages;
737
738 if (needs_subgroup_size_control || needs_subgroup_properties) {
739 // Always chain all physical device properties structs in case at least one
740 // of them is needed.
741 VkPhysicalDeviceProperties2 properties2 = {};
742 VkPhysicalDeviceSubgroupProperties subgroup_properties = {};
743 VkPhysicalDeviceVulkan11Properties vulkan11_properties = {};
744 VkSubgroupFeatureFlags subgroup_supported_operations;
745 VkShaderStageFlags subgroup_supported_stages;
746 properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
747 properties2.pNext = &subgroup_size_control_properties_;
748 subgroup_size_control_properties_.sType =
749 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT;
750 if (SupportsApiVersion(1, 2, 0)) {
751 subgroup_size_control_properties_.pNext = &vulkan11_properties;
752 vulkan11_properties.sType =
753 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES;
754 } else {
755 subgroup_size_control_properties_.pNext = &subgroup_properties;
756 subgroup_properties.sType =
757 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
758 }
759
760 if (needs_subgroup_size_control && !SupportsApiVersion(1, 1, 0)) {
761 return Result(
762 "Vulkan: Device::Initialize subgroup size control feature also "
763 "requires an API version of 1.1 or higher");
764 }
765 if (needs_subgroup_properties && !SupportsApiVersion(1, 1, 0)) {
766 return Result(
767 "Vulkan: Device::Initialize subgroup properties also "
768 "requires an API version of 1.1 or higher");
769 }
770 ptrs_.vkGetPhysicalDeviceProperties2(physical_device_, &properties2);
771
772 if (needs_subgroup_supported_operations) {
773 // Read supported subgroup operations from the correct struct depending on
774 // the device API
775 if (SupportsApiVersion(1, 2, 0)) {
776 subgroup_supported_operations =
777 vulkan11_properties.subgroupSupportedOperations;
778 } else {
779 subgroup_supported_operations = subgroup_properties.supportedOperations;
780 }
781
782 for (const auto& feature : required_features) {
783 if (feature == kSubgroupSupportedOperationsBasic &&
784 !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_BASIC_BIT)) {
785 return amber::Result("Missing subgroup operation basic feature");
786 }
787 if (feature == kSubgroupSupportedOperationsVote &&
788 !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_VOTE_BIT)) {
789 return amber::Result("Missing subgroup operation vote feature");
790 }
791 if (feature == kSubgroupSupportedOperationsArithmetic &&
792 !(subgroup_supported_operations &
793 VK_SUBGROUP_FEATURE_ARITHMETIC_BIT)) {
794 return amber::Result("Missing subgroup operation arithmetic feature");
795 }
796 if (feature == kSubgroupSupportedOperationsBallot &&
797 !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_BALLOT_BIT)) {
798 return amber::Result("Missing subgroup operation ballot feature");
799 }
800 if (feature == kSubgroupSupportedOperationsShuffle &&
801 !(subgroup_supported_operations &
802 VK_SUBGROUP_FEATURE_SHUFFLE_BIT)) {
803 return amber::Result("Missing subgroup operation shuffle feature");
804 }
805 if (feature == kSubgroupSupportedOperationsShuffleRelative &&
806 !(subgroup_supported_operations &
807 VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT)) {
808 return amber::Result(
809 "Missing subgroup operation shuffle relative feature");
810 }
811 if (feature == kSubgroupSupportedOperationsClustered &&
812 !(subgroup_supported_operations &
813 VK_SUBGROUP_FEATURE_CLUSTERED_BIT)) {
814 return amber::Result("Missing subgroup operation clustered feature");
815 }
816 if (feature == kSubgroupSupportedOperationsQuad &&
817 !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_QUAD_BIT)) {
818 return amber::Result("Missing subgroup operation quad feature");
819 }
820 }
821 }
822
823 if (needs_subgroup_supported_stages) {
824 // Read supported subgroup stages from the correct struct depending on the
825 // device API
826 if (SupportsApiVersion(1, 2, 0)) {
827 subgroup_supported_stages = vulkan11_properties.subgroupSupportedStages;
828 } else {
829 subgroup_supported_stages = subgroup_properties.supportedStages;
830 }
831
832 for (const auto& feature : required_features) {
833 if (feature == kSubgroupSupportedStagesVertex &&
834 !(subgroup_supported_stages & VK_SHADER_STAGE_VERTEX_BIT)) {
835 return amber::Result(
836 "Subgroup operations not supported for vertex shader stage");
837 }
838 if (feature == kSubgroupSupportedStagesTessellationControl &&
839 !(subgroup_supported_stages &
840 VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)) {
841 return amber::Result(
842 "Subgroup operations not supported for tessellation control "
843 "shader stage");
844 }
845 if (feature == kSubgroupSupportedStagesTessellationEvaluation &&
846 !(subgroup_supported_stages &
847 VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)) {
848 return amber::Result(
849 "Subgroup operations not supported for tessellation evaluation "
850 "shader stage");
851 }
852 if (feature == kSubgroupSupportedStagesGeometry &&
853 !(subgroup_supported_stages & VK_SHADER_STAGE_GEOMETRY_BIT)) {
854 return amber::Result(
855 "Subgroup operations not supported for geometry shader stage");
856 }
857 if (feature == kSubgroupSupportedStagesFragment &&
858 !(subgroup_supported_stages & VK_SHADER_STAGE_FRAGMENT_BIT)) {
859 return amber::Result(
860 "Subgroup operations not supported for fragment shader stage");
861 }
862 if (feature == kSubgroupSupportedStagesCompute &&
863 !(subgroup_supported_stages & VK_SHADER_STAGE_COMPUTE_BIT)) {
864 return amber::Result(
865 "Subgroup operations not supported for compute shader stage");
866 }
867 }
868 }
869 }
870
871 return {};
872 }
873
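// Returns true if |format| supports the usage implied by |type|: image-like
// buffer types are checked against optimalTilingFeatures, vertex buffers
// against bufferFeatures.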
874 bool Device::IsFormatSupportedByPhysicalDevice(const Format& format,
875 BufferType type) {
876 VkFormat vk_format = GetVkFormat(format);
877 VkFormatProperties properties = VkFormatProperties();
878 GetPtrs()->vkGetPhysicalDeviceFormatProperties(physical_device_, vk_format,
879 &properties);
880
881 VkFormatFeatureFlagBits flag = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
882 bool is_buffer_type_image = false;
883 switch (type) {
884 case BufferType::kColor:
885 case BufferType::kResolve:
886 case BufferType::kStorageImage:
887 flag = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
888 is_buffer_type_image = true;
889 break;
890 case BufferType::kDepthStencil:
891 flag = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
892 is_buffer_type_image = true;
893 break;
894 case BufferType::kSampledImage:
895 case BufferType::kCombinedImageSampler:
896 flag = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
897 is_buffer_type_image = true;
898 break;
899 case BufferType::kVertex:
900 flag = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
901 is_buffer_type_image = false;
902 break;
903 default:
904 return false;
905 }
906
907 return ((is_buffer_type_image ? properties.optimalTilingFeatures
908 : properties.bufferFeatures) &
909 flag) == flag;
910 }
911
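// Returns true if the memory type at |memory_type_index| has all of the
// requested property |flags| set.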
912 bool Device::HasMemoryFlags(uint32_t memory_type_index,
913 const VkMemoryPropertyFlags flags) const {
914 return (physical_memory_properties_.memoryTypes[memory_type_index]
915 .propertyFlags &
916 flags) == flags;
917 }
918
919 bool Device::IsMemoryHostAccessible(uint32_t memory_type_index) const {
920 return HasMemoryFlags(memory_type_index, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
921 }
922
923 bool Device::IsMemoryHostCoherent(uint32_t memory_type_index) const {
924 return HasMemoryFlags(memory_type_index,
925 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
926 }
927
928 uint32_t Device::GetMaxPushConstants() const {
929 return physical_device_properties_.limits.maxPushConstantsSize;
930 }
931
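// Returns true if |descriptor_set| is a valid set index, i.e. strictly less
// than the device's maxBoundDescriptorSets limit.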
932 bool Device::IsDescriptorSetInBounds(uint32_t descriptor_set) const {
933 VkPhysicalDeviceProperties properties = VkPhysicalDeviceProperties();
934 GetPtrs()->vkGetPhysicalDeviceProperties(physical_device_, &properties);
935 return properties.limits.maxBoundDescriptorSets > descriptor_set;
936 }
937
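// Maps an Amber FormatType onto the corresponding VkFormat; unknown formats
// map to VK_FORMAT_UNDEFINED.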
938 VkFormat Device::GetVkFormat(const Format& format) const {
939 VkFormat ret = VK_FORMAT_UNDEFINED;
940 switch (format.GetFormatType()) {
941 case FormatType::kUnknown:
942 ret = VK_FORMAT_UNDEFINED;
943 break;
944 case FormatType::kA1R5G5B5_UNORM_PACK16:
945 ret = VK_FORMAT_A1R5G5B5_UNORM_PACK16;
946 break;
947 case FormatType::kA2B10G10R10_SINT_PACK32:
948 ret = VK_FORMAT_A2B10G10R10_SINT_PACK32;
949 break;
950 case FormatType::kA2B10G10R10_SNORM_PACK32:
951 ret = VK_FORMAT_A2B10G10R10_SNORM_PACK32;
952 break;
953 case FormatType::kA2B10G10R10_SSCALED_PACK32:
954 ret = VK_FORMAT_A2B10G10R10_SSCALED_PACK32;
955 break;
956 case FormatType::kA2B10G10R10_UINT_PACK32:
957 ret = VK_FORMAT_A2B10G10R10_UINT_PACK32;
958 break;
959 case FormatType::kA2B10G10R10_UNORM_PACK32:
960 ret = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
961 break;
962 case FormatType::kA2B10G10R10_USCALED_PACK32:
963 ret = VK_FORMAT_A2B10G10R10_USCALED_PACK32;
964 break;
965 case FormatType::kA2R10G10B10_SINT_PACK32:
966 ret = VK_FORMAT_A2R10G10B10_SINT_PACK32;
967 break;
968 case FormatType::kA2R10G10B10_SNORM_PACK32:
969 ret = VK_FORMAT_A2R10G10B10_SNORM_PACK32;
970 break;
971 case FormatType::kA2R10G10B10_SSCALED_PACK32:
972 ret = VK_FORMAT_A2R10G10B10_SSCALED_PACK32;
973 break;
974 case FormatType::kA2R10G10B10_UINT_PACK32:
975 ret = VK_FORMAT_A2R10G10B10_UINT_PACK32;
976 break;
977 case FormatType::kA2R10G10B10_UNORM_PACK32:
978 ret = VK_FORMAT_A2R10G10B10_UNORM_PACK32;
979 break;
980 case FormatType::kA2R10G10B10_USCALED_PACK32:
981 ret = VK_FORMAT_A2R10G10B10_USCALED_PACK32;
982 break;
983 case FormatType::kA8B8G8R8_SINT_PACK32:
984 ret = VK_FORMAT_A8B8G8R8_SINT_PACK32;
985 break;
986 case FormatType::kA8B8G8R8_SNORM_PACK32:
987 ret = VK_FORMAT_A8B8G8R8_SNORM_PACK32;
988 break;
989 case FormatType::kA8B8G8R8_SRGB_PACK32:
990 ret = VK_FORMAT_A8B8G8R8_SRGB_PACK32;
991 break;
992 case FormatType::kA8B8G8R8_SSCALED_PACK32:
993 ret = VK_FORMAT_A8B8G8R8_SSCALED_PACK32;
994 break;
995 case FormatType::kA8B8G8R8_UINT_PACK32:
996 ret = VK_FORMAT_A8B8G8R8_UINT_PACK32;
997 break;
998 case FormatType::kA8B8G8R8_UNORM_PACK32:
999 ret = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
1000 break;
1001 case FormatType::kA8B8G8R8_USCALED_PACK32:
1002 ret = VK_FORMAT_A8B8G8R8_USCALED_PACK32;
1003 break;
1004 case FormatType::kB10G11R11_UFLOAT_PACK32:
1005 ret = VK_FORMAT_B10G11R11_UFLOAT_PACK32;
1006 break;
1007 case FormatType::kB4G4R4A4_UNORM_PACK16:
1008 ret = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
1009 break;
1010 case FormatType::kB5G5R5A1_UNORM_PACK16:
1011 ret = VK_FORMAT_B5G5R5A1_UNORM_PACK16;
1012 break;
1013 case FormatType::kB5G6R5_UNORM_PACK16:
1014 ret = VK_FORMAT_B5G6R5_UNORM_PACK16;
1015 break;
1016 case FormatType::kB8G8R8A8_SINT:
1017 ret = VK_FORMAT_B8G8R8A8_SINT;
1018 break;
1019 case FormatType::kB8G8R8A8_SNORM:
1020 ret = VK_FORMAT_B8G8R8A8_SNORM;
1021 break;
1022 case FormatType::kB8G8R8A8_SRGB:
1023 ret = VK_FORMAT_B8G8R8A8_SRGB;
1024 break;
1025 case FormatType::kB8G8R8A8_SSCALED:
1026 ret = VK_FORMAT_B8G8R8A8_SSCALED;
1027 break;
1028 case FormatType::kB8G8R8A8_UINT:
1029 ret = VK_FORMAT_B8G8R8A8_UINT;
1030 break;
1031 case FormatType::kB8G8R8A8_UNORM:
1032 ret = VK_FORMAT_B8G8R8A8_UNORM;
1033 break;
1034 case FormatType::kB8G8R8A8_USCALED:
1035 ret = VK_FORMAT_B8G8R8A8_USCALED;
1036 break;
1037 case FormatType::kB8G8R8_SINT:
1038 ret = VK_FORMAT_B8G8R8_SINT;
1039 break;
1040 case FormatType::kB8G8R8_SNORM:
1041 ret = VK_FORMAT_B8G8R8_SNORM;
1042 break;
1043 case FormatType::kB8G8R8_SRGB:
1044 ret = VK_FORMAT_B8G8R8_SRGB;
1045 break;
1046 case FormatType::kB8G8R8_SSCALED:
1047 ret = VK_FORMAT_B8G8R8_SSCALED;
1048 break;
1049 case FormatType::kB8G8R8_UINT:
1050 ret = VK_FORMAT_B8G8R8_UINT;
1051 break;
1052 case FormatType::kB8G8R8_UNORM:
1053 ret = VK_FORMAT_B8G8R8_UNORM;
1054 break;
1055 case FormatType::kB8G8R8_USCALED:
1056 ret = VK_FORMAT_B8G8R8_USCALED;
1057 break;
1058 case FormatType::kD16_UNORM:
1059 ret = VK_FORMAT_D16_UNORM;
1060 break;
1061 case FormatType::kD16_UNORM_S8_UINT:
1062 ret = VK_FORMAT_D16_UNORM_S8_UINT;
1063 break;
1064 case FormatType::kD24_UNORM_S8_UINT:
1065 ret = VK_FORMAT_D24_UNORM_S8_UINT;
1066 break;
1067 case FormatType::kD32_SFLOAT:
1068 ret = VK_FORMAT_D32_SFLOAT;
1069 break;
1070 case FormatType::kD32_SFLOAT_S8_UINT:
1071 ret = VK_FORMAT_D32_SFLOAT_S8_UINT;
1072 break;
1073 case FormatType::kR16G16B16A16_SFLOAT:
1074 ret = VK_FORMAT_R16G16B16A16_SFLOAT;
1075 break;
1076 case FormatType::kR16G16B16A16_SINT:
1077 ret = VK_FORMAT_R16G16B16A16_SINT;
1078 break;
1079 case FormatType::kR16G16B16A16_SNORM:
1080 ret = VK_FORMAT_R16G16B16A16_SNORM;
1081 break;
1082 case FormatType::kR16G16B16A16_SSCALED:
1083 ret = VK_FORMAT_R16G16B16A16_SSCALED;
1084 break;
1085 case FormatType::kR16G16B16A16_UINT:
1086 ret = VK_FORMAT_R16G16B16A16_UINT;
1087 break;
1088 case FormatType::kR16G16B16A16_UNORM:
1089 ret = VK_FORMAT_R16G16B16A16_UNORM;
1090 break;
1091 case FormatType::kR16G16B16A16_USCALED:
1092 ret = VK_FORMAT_R16G16B16A16_USCALED;
1093 break;
1094 case FormatType::kR16G16B16_SFLOAT:
1095 ret = VK_FORMAT_R16G16B16_SFLOAT;
1096 break;
1097 case FormatType::kR16G16B16_SINT:
1098 ret = VK_FORMAT_R16G16B16_SINT;
1099 break;
1100 case FormatType::kR16G16B16_SNORM:
1101 ret = VK_FORMAT_R16G16B16_SNORM;
1102 break;
1103 case FormatType::kR16G16B16_SSCALED:
1104 ret = VK_FORMAT_R16G16B16_SSCALED;
1105 break;
1106 case FormatType::kR16G16B16_UINT:
1107 ret = VK_FORMAT_R16G16B16_UINT;
1108 break;
1109 case FormatType::kR16G16B16_UNORM:
1110 ret = VK_FORMAT_R16G16B16_UNORM;
1111 break;
1112 case FormatType::kR16G16B16_USCALED:
1113 ret = VK_FORMAT_R16G16B16_USCALED;
1114 break;
1115 case FormatType::kR16G16_SFLOAT:
1116 ret = VK_FORMAT_R16G16_SFLOAT;
1117 break;
1118 case FormatType::kR16G16_SINT:
1119 ret = VK_FORMAT_R16G16_SINT;
1120 break;
1121 case FormatType::kR16G16_SNORM:
1122 ret = VK_FORMAT_R16G16_SNORM;
1123 break;
1124 case FormatType::kR16G16_SSCALED:
1125 ret = VK_FORMAT_R16G16_SSCALED;
1126 break;
1127 case FormatType::kR16G16_UINT:
1128 ret = VK_FORMAT_R16G16_UINT;
1129 break;
1130 case FormatType::kR16G16_UNORM:
1131 ret = VK_FORMAT_R16G16_UNORM;
1132 break;
1133 case FormatType::kR16G16_USCALED:
1134 ret = VK_FORMAT_R16G16_USCALED;
1135 break;
1136 case FormatType::kR16_SFLOAT:
1137 ret = VK_FORMAT_R16_SFLOAT;
1138 break;
1139 case FormatType::kR16_SINT:
1140 ret = VK_FORMAT_R16_SINT;
1141 break;
1142 case FormatType::kR16_SNORM:
1143 ret = VK_FORMAT_R16_SNORM;
1144 break;
1145 case FormatType::kR16_SSCALED:
1146 ret = VK_FORMAT_R16_SSCALED;
1147 break;
1148 case FormatType::kR16_UINT:
1149 ret = VK_FORMAT_R16_UINT;
1150 break;
1151 case FormatType::kR16_UNORM:
1152 ret = VK_FORMAT_R16_UNORM;
1153 break;
1154 case FormatType::kR16_USCALED:
1155 ret = VK_FORMAT_R16_USCALED;
1156 break;
1157 case FormatType::kR32G32B32A32_SFLOAT:
1158 ret = VK_FORMAT_R32G32B32A32_SFLOAT;
1159 break;
1160 case FormatType::kR32G32B32A32_SINT:
1161 ret = VK_FORMAT_R32G32B32A32_SINT;
1162 break;
1163 case FormatType::kR32G32B32A32_UINT:
1164 ret = VK_FORMAT_R32G32B32A32_UINT;
1165 break;
1166 case FormatType::kR32G32B32_SFLOAT:
1167 ret = VK_FORMAT_R32G32B32_SFLOAT;
1168 break;
1169 case FormatType::kR32G32B32_SINT:
1170 ret = VK_FORMAT_R32G32B32_SINT;
1171 break;
1172 case FormatType::kR32G32B32_UINT:
1173 ret = VK_FORMAT_R32G32B32_UINT;
1174 break;
1175 case FormatType::kR32G32_SFLOAT:
1176 ret = VK_FORMAT_R32G32_SFLOAT;
1177 break;
1178 case FormatType::kR32G32_SINT:
1179 ret = VK_FORMAT_R32G32_SINT;
1180 break;
1181 case FormatType::kR32G32_UINT:
1182 ret = VK_FORMAT_R32G32_UINT;
1183 break;
1184 case FormatType::kR32_SFLOAT:
1185 ret = VK_FORMAT_R32_SFLOAT;
1186 break;
1187 case FormatType::kR32_SINT:
1188 ret = VK_FORMAT_R32_SINT;
1189 break;
1190 case FormatType::kR32_UINT:
1191 ret = VK_FORMAT_R32_UINT;
1192 break;
1193 case FormatType::kR4G4B4A4_UNORM_PACK16:
1194 ret = VK_FORMAT_R4G4B4A4_UNORM_PACK16;
1195 break;
1196 case FormatType::kR4G4_UNORM_PACK8:
1197 ret = VK_FORMAT_R4G4_UNORM_PACK8;
1198 break;
1199 case FormatType::kR5G5B5A1_UNORM_PACK16:
1200 ret = VK_FORMAT_R5G5B5A1_UNORM_PACK16;
1201 break;
1202 case FormatType::kR5G6B5_UNORM_PACK16:
1203 ret = VK_FORMAT_R5G6B5_UNORM_PACK16;
1204 break;
1205 case FormatType::kR64G64B64A64_SFLOAT:
1206 ret = VK_FORMAT_R64G64B64A64_SFLOAT;
1207 break;
1208 case FormatType::kR64G64B64A64_SINT:
1209 ret = VK_FORMAT_R64G64B64A64_SINT;
1210 break;
1211 case FormatType::kR64G64B64A64_UINT:
1212 ret = VK_FORMAT_R64G64B64A64_UINT;
1213 break;
1214 case FormatType::kR64G64B64_SFLOAT:
1215 ret = VK_FORMAT_R64G64B64_SFLOAT;
1216 break;
1217 case FormatType::kR64G64B64_SINT:
1218 ret = VK_FORMAT_R64G64B64_SINT;
1219 break;
1220 case FormatType::kR64G64B64_UINT:
1221 ret = VK_FORMAT_R64G64B64_UINT;
1222 break;
1223 case FormatType::kR64G64_SFLOAT:
1224 ret = VK_FORMAT_R64G64_SFLOAT;
1225 break;
1226 case FormatType::kR64G64_SINT:
1227 ret = VK_FORMAT_R64G64_SINT;
1228 break;
1229 case FormatType::kR64G64_UINT:
1230 ret = VK_FORMAT_R64G64_UINT;
1231 break;
1232 case FormatType::kR64_SFLOAT:
1233 ret = VK_FORMAT_R64_SFLOAT;
1234 break;
1235 case FormatType::kR64_SINT:
1236 ret = VK_FORMAT_R64_SINT;
1237 break;
1238 case FormatType::kR64_UINT:
1239 ret = VK_FORMAT_R64_UINT;
1240 break;
1241 case FormatType::kR8G8B8A8_SINT:
1242 ret = VK_FORMAT_R8G8B8A8_SINT;
1243 break;
1244 case FormatType::kR8G8B8A8_SNORM:
1245 ret = VK_FORMAT_R8G8B8A8_SNORM;
1246 break;
1247 case FormatType::kR8G8B8A8_SRGB:
1248 ret = VK_FORMAT_R8G8B8A8_SRGB;
1249 break;
1250 case FormatType::kR8G8B8A8_SSCALED:
1251 ret = VK_FORMAT_R8G8B8A8_SSCALED;
1252 break;
1253 case FormatType::kR8G8B8A8_UINT:
1254 ret = VK_FORMAT_R8G8B8A8_UINT;
1255 break;
1256 case FormatType::kR8G8B8A8_UNORM:
1257 ret = VK_FORMAT_R8G8B8A8_UNORM;
1258 break;
1259 case FormatType::kR8G8B8A8_USCALED:
1260 ret = VK_FORMAT_R8G8B8A8_USCALED;
1261 break;
1262 case FormatType::kR8G8B8_SINT:
1263 ret = VK_FORMAT_R8G8B8_SINT;
1264 break;
1265 case FormatType::kR8G8B8_SNORM:
1266 ret = VK_FORMAT_R8G8B8_SNORM;
1267 break;
1268 case FormatType::kR8G8B8_SRGB:
1269 ret = VK_FORMAT_R8G8B8_SRGB;
1270 break;
1271 case FormatType::kR8G8B8_SSCALED:
1272 ret = VK_FORMAT_R8G8B8_SSCALED;
1273 break;
1274 case FormatType::kR8G8B8_UINT:
1275 ret = VK_FORMAT_R8G8B8_UINT;
1276 break;
1277 case FormatType::kR8G8B8_UNORM:
1278 ret = VK_FORMAT_R8G8B8_UNORM;
1279 break;
1280 case FormatType::kR8G8B8_USCALED:
1281 ret = VK_FORMAT_R8G8B8_USCALED;
1282 break;
1283 case FormatType::kR8G8_SINT:
1284 ret = VK_FORMAT_R8G8_SINT;
1285 break;
1286 case FormatType::kR8G8_SNORM:
1287 ret = VK_FORMAT_R8G8_SNORM;
1288 break;
1289 case FormatType::kR8G8_SRGB:
1290 ret = VK_FORMAT_R8G8_SRGB;
1291 break;
1292 case FormatType::kR8G8_SSCALED:
1293 ret = VK_FORMAT_R8G8_SSCALED;
1294 break;
1295 case FormatType::kR8G8_UINT:
1296 ret = VK_FORMAT_R8G8_UINT;
1297 break;
1298 case FormatType::kR8G8_UNORM:
1299 ret = VK_FORMAT_R8G8_UNORM;
1300 break;
1301 case FormatType::kR8G8_USCALED:
1302 ret = VK_FORMAT_R8G8_USCALED;
1303 break;
1304 case FormatType::kR8_SINT:
1305 ret = VK_FORMAT_R8_SINT;
1306 break;
1307 case FormatType::kR8_SNORM:
1308 ret = VK_FORMAT_R8_SNORM;
1309 break;
1310 case FormatType::kR8_SRGB:
1311 ret = VK_FORMAT_R8_SRGB;
1312 break;
1313 case FormatType::kR8_SSCALED:
1314 ret = VK_FORMAT_R8_SSCALED;
1315 break;
1316 case FormatType::kR8_UINT:
1317 ret = VK_FORMAT_R8_UINT;
1318 break;
1319 case FormatType::kR8_UNORM:
1320 ret = VK_FORMAT_R8_UNORM;
1321 break;
1322 case FormatType::kR8_USCALED:
1323 ret = VK_FORMAT_R8_USCALED;
1324 break;
1325 case FormatType::kS8_UINT:
1326 ret = VK_FORMAT_S8_UINT;
1327 break;
1328 case FormatType::kX8_D24_UNORM_PACK32:
1329 ret = VK_FORMAT_X8_D24_UNORM_PACK32;
1330 break;
1331 }
1332 return ret;
1333 }
1334
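// Returns true if |required_subgroup_size| can be requested for shaders of
// |type|: the stage must appear in requiredSubgroupSizeStages and the size
// must be non-zero and lie within [minSubgroupSize, maxSubgroupSize].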
1335 bool Device::IsRequiredSubgroupSizeSupported(
1336 const ShaderType type,
1337 const uint32_t required_subgroup_size) const {
1338 VkShaderStageFlagBits stage = {};
1339 switch (type) {
1340 case kShaderTypeGeometry:
1341 stage = VK_SHADER_STAGE_GEOMETRY_BIT;
1342 break;
1343 case kShaderTypeFragment:
1344 stage = VK_SHADER_STAGE_FRAGMENT_BIT;
1345 break;
1346 case kShaderTypeVertex:
1347 stage = VK_SHADER_STAGE_VERTEX_BIT;
1348 break;
1349 case kShaderTypeTessellationControl:
1350 stage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
1351 break;
1352 case kShaderTypeTessellationEvaluation:
1353 stage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
1354 break;
1355 case kShaderTypeCompute:
1356 stage = VK_SHADER_STAGE_COMPUTE_BIT;
1357 break;
1358 default:
1359 return false;
1360 }
1361 if ((stage & subgroup_size_control_properties_.requiredSubgroupSizeStages) ==
1362 0) {
1363 return false;
1364 }
1365 if (required_subgroup_size == 0 ||
1366 required_subgroup_size <
1367 subgroup_size_control_properties_.minSubgroupSize ||
1368 required_subgroup_size >
1369 subgroup_size_control_properties_.maxSubgroupSize) {
1370 return false;
1371 }
1372
1373 return true;
1374 }
1375
1376 uint32_t Device::GetMinSubgroupSize() const {
1377 return subgroup_size_control_properties_.minSubgroupSize;
1378 }
1379
1380 uint32_t Device::GetMaxSubgroupSize() const {
1381 return subgroup_size_control_properties_.maxSubgroupSize;
1382 }
1383
1384 } // namespace vulkan
1385 } // namespace amber
1386