/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "gpu_image_vk.h"

#include <cinttypes>
#include <vulkan/vulkan_core.h>

#include <base/math/mathf.h>

#if (RENDER_PERF_ENABLED == 1)
#include <core/implementation_uids.h>
#include <core/perf/intf_performance_data_manager.h>
#endif

#include <render/namespace.h>

#include "device/device.h"
#include "device/gpu_resource_desc_flag_validation.h"
#include "util/log.h"
#include "vulkan/device_vk.h"
#include "vulkan/validate_vk.h"

RENDER_BEGIN_NAMESPACE()
namespace {
#if (RENDER_PERF_ENABLED == 1)
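// Reports GPU image memory usage to the "Memory" performance data manager;
// positive byte counts record allocations, negative counts record frees.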
void RecordAllocation(const int64_t alignedByteSize)
{
    if (auto* inst = CORE_NS::GetInstance<CORE_NS::IPerformanceDataManagerFactory>(CORE_NS::UID_PERFORMANCE_FACTORY);
        inst) {
        CORE_NS::IPerformanceDataManager* pdm = inst->Get("Memory");
        pdm->UpdateData("AllGpuImages", "GPU_IMAGE", alignedByteSize,
            CORE_NS::IPerformanceDataManager::PerformanceTimingData::DataType::BYTES);
    }
}
#endif

#if (RENDER_VALIDATION_ENABLED == 1)
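// Checks the physical device's optimal-tiling format features against the requested
// attachment/storage usage flags and logs an error if the format is unsupported.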
void ValidateFormat(const DevicePlatformDataVk& devicePlat, const GpuImageDesc& desc)
{
    const VkFormat format = (VkFormat)desc.format;
    VkFormatProperties formatProperties;
    vkGetPhysicalDeviceFormatProperties(devicePlat.physicalDevice, // physicalDevice
        format,                                                    // format
        &formatProperties);                                        // pFormatProperties
    const VkFormatFeatureFlags optimalTilingFeatureFlags = formatProperties.optimalTilingFeatures;
    bool valid = true;
    if (desc.usageFlags & ImageUsageFlagBits::CORE_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
        if ((optimalTilingFeatureFlags & VkFormatFeatureFlagBits::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) ==
            0) {
            valid = false;
        }
    }
    if (desc.usageFlags & ImageUsageFlagBits::CORE_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
        if ((optimalTilingFeatureFlags & VkFormatFeatureFlagBits::VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) == 0) {
            valid = false;
        }
    }
    if (desc.usageFlags & ImageUsageFlagBits::CORE_IMAGE_USAGE_STORAGE_BIT) {
        if ((optimalTilingFeatureFlags & VkFormatFeatureFlagBits::VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) == 0) {
            valid = false;
        }
    }

    if (!valid) {
        PLUGIN_LOG_E("Unsupported image format feature flags (CORE_FORMAT: %u)", desc.format);
    }
}
#endif

constexpr uint32_t IMAGE_VIEW_USAGE_FLAGS {
    CORE_IMAGE_USAGE_SAMPLED_BIT | CORE_IMAGE_USAGE_STORAGE_BIT | CORE_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
    CORE_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | CORE_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
    CORE_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | CORE_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT
};

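// Returns the given format if the device supports it as a depth format; otherwise falls
// back to the first supported depth format (or BASE_FORMAT_UNDEFINED if none is available).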
BASE_NS::Format CheckDepthFormat(const DeviceVk& deviceVk, const BASE_NS::Format format)
{
    const auto& devPlat = deviceVk.GetPlatformInternalDataVk();
    for (const auto& supportedDepthFormat : devPlat.supportedDepthFormats) {
        if (format == supportedDepthFormat) {
            return format;
        }
    }
    if (!devPlat.supportedDepthFormats.empty()) {
#if (RENDER_VALIDATION_ENABLED == 1)
        PLUGIN_LOG_W("RENDER_VALIDATION: unsupported depth format (%u), using format (%u)", format,
            devPlat.supportedDepthFormats[0]);
#endif
        return devPlat.supportedDepthFormats[0];
    } else {
        return BASE_NS::Format::BASE_FORMAT_UNDEFINED;
    }
}

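// Maps array and cube view types to the single-slice view type used when creating
// per-mip/per-layer attachment views (e.g. 2D_ARRAY -> 2D, CUBE -> 2D).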
inline VkImageViewType GetBaseImageViewType(const VkImageViewType imageViewType)
{
    if (imageViewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) {
        return VK_IMAGE_VIEW_TYPE_1D;
    } else if (imageViewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY) {
        return VK_IMAGE_VIEW_TYPE_2D;
    } else if (imageViewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) {
        return VK_IMAGE_VIEW_TYPE_CUBE;
    } else if (imageViewType == VK_IMAGE_VIEW_TYPE_CUBE) {
        return VK_IMAGE_VIEW_TYPE_2D;
    }
    return imageViewType;
}

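// Packs the VMA allocation info and the chosen memory property flags into the
// platform memory struct exposed through GpuImagePlatformDataVk.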
inline GpuResourceMemoryVk GetPlatMemory(const VmaAllocationInfo& allocationInfo, const VkMemoryPropertyFlags flags)
{
    return GpuResourceMemoryVk { allocationInfo.deviceMemory, allocationInfo.offset, allocationInfo.size,
        allocationInfo.pMappedData, allocationInfo.memoryType, flags };
}

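// Vulkan requires identity component swizzles for image views used as framebuffer
// attachments, so a non-identity mapping forces creation of a separate identity view.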
inline bool InvalidFboSwizzle(const VkComponentMapping& componentMapping)
{
    return ((componentMapping.r != VK_COMPONENT_SWIZZLE_IDENTITY) ||
            (componentMapping.g != VK_COMPONENT_SWIZZLE_IDENTITY) ||
            (componentMapping.b != VK_COMPONENT_SWIZZLE_IDENTITY) ||
            (componentMapping.a != VK_COMPONENT_SWIZZLE_IDENTITY));
}

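// Copies the engine-side GpuImageDesc fields into their Vulkan equivalents in the
// platform data; the engine enums mirror the Vulkan values, so static_casts suffice.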
void FillImageDescVk(const GpuImageDesc& desc, GpuImagePlatformDataVk& plat)
{
    plat.format = static_cast<VkFormat>(desc.format);
    plat.aspectFlags = GpuImageUtilsVk::GetImageAspectFlagsFromFormat(plat.format);
    plat.usage = static_cast<VkImageUsageFlags>(desc.usageFlags);
    plat.extent = { desc.width, desc.height, desc.depth };
    plat.tiling = static_cast<VkImageTiling>(desc.imageTiling);
    plat.type = static_cast<VkImageType>(desc.imageType);
    plat.samples = static_cast<VkSampleCountFlagBits>(desc.sampleCountFlags);
    plat.mipLevels = desc.mipCount;
    plat.arrayLayers = desc.layerCount;
}

struct ImageInputStruct {
    VkImage image { VK_NULL_HANDLE };
    VkFormat format { VK_FORMAT_UNDEFINED };
    VkComponentMapping componentMapping {};
};

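// Creates a VkImageView for the given subresource range; ycbcrConversionInfo may be
// null and is chained through pNext only for formats needing sampler YCbCr conversion.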
VkImageView CreateImageView(const VkDevice device, const VkSamplerYcbcrConversionInfo* ycbcrConversionInfo,
    const ImageInputStruct& imageInput, const VkImageViewType imageViewType, const VkImageAspectFlags imageAspectFlags,
    const uint32_t baseMipLevel, const uint32_t levelCount, const uint32_t baseArrayLayer, const uint32_t layerCount)
{
    const VkImageSubresourceRange imageSubresourceRange {
        imageAspectFlags, // aspectMask
        baseMipLevel,     // baseMipLevel
        levelCount,       // levelCount
        baseArrayLayer,   // baseArrayLayer
        layerCount        // layerCount
    };

    const VkImageViewCreateInfo imageViewCreateInfo {
        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, // sType
        ycbcrConversionInfo,                      // pNext
        0,                                        // flags
        imageInput.image,                         // image
        imageViewType,                            // viewType
        imageInput.format,                        // format
        imageInput.componentMapping,              // components
        imageSubresourceRange,                    // subresourceRange
    };

    VkImageView imageView = VK_NULL_HANDLE;
    VALIDATE_VK_RESULT(vkCreateImageView(device, // device
        &imageViewCreateInfo,                    // pCreateInfo
        nullptr,                                 // pAllocator
        &imageView));                            // pView

    return imageView;
}
} // namespace

GpuImageVk::GpuImageVk(Device& device, const GpuImageDesc& desc) : GpuImage(), device_(device), desc_(desc)
{
    PLUGIN_ASSERT_MSG(desc_.memoryPropertyFlags & MemoryPropertyFlagBits::CORE_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
        "Device local memory is the only memory property supported for Vulkan GpuImage (flags: %u)",
        desc_.memoryPropertyFlags);

    FillImageDescVk(desc_, plat_);
    if (plat_.aspectFlags & VK_IMAGE_ASPECT_DEPTH_BIT) {
        desc_.format = CheckDepthFormat((const DeviceVk&)device_, desc_.format);
        if (desc_.format != desc.format) {
            plat_.format = static_cast<VkFormat>(desc_.format);
            plat_.aspectFlags = GpuImageUtilsVk::GetImageAspectFlagsFromFormat(plat_.format);
        }
    }
#if (RENDER_VALIDATION_ENABLED == 1)
    ValidateFormat((const DevicePlatformDataVk&)device_.GetPlatformData(), desc_);
#endif

    CreateVkImage();
    if ((desc_.usageFlags & IMAGE_VIEW_USAGE_FLAGS) && plat_.image) {
        CreateVkImageViews(plat_.aspectFlags, nullptr);
    }

#if (RENDER_PERF_ENABLED == 1)
    RecordAllocation(static_cast<int64_t>(mem_.allocationInfo.size));
#endif

#if (RENDER_DEBUG_GPU_RESOURCE_IDS == 1)
    PLUGIN_LOG_E("gpu image id >: 0x%" PRIxPTR, (uintptr_t)plat_.image);
#endif
}

GpuImageVk::GpuImageVk(
    Device& device, const GpuImageDesc& desc, const GpuImagePlatformData& platformData, const uintptr_t hwBuffer)
    : device_(device), plat_((const GpuImagePlatformDataVk&)platformData),
      desc_(hwBuffer ? GetImageDescFromHwBufferDesc(hwBuffer) : desc), ownsResources_(false),
      ownsImage_(!plat_.image), ownsImageViews_(!plat_.imageView)
{
    // with platform data the resources can be created from a hwbuffer and/or direct platform images;
    // destruction happens based on ownsImage_ and ownsImageViews_

#if (RENDER_VALIDATION_ENABLED == 1)
    if ((!plat_.image) && (!plat_.imageView) && (!hwBuffer)) {
        PLUGIN_LOG_W("RENDER_VALIDATION: creating GpuImage without image, imageView, or hwBuffer");
    }
#endif
    FillImageDescVk(desc_, plat_);
    // additional image views are not created if an initial image view is provided
    if (plat_.image && !plat_.imageView && (desc_.usageFlags & IMAGE_VIEW_USAGE_FLAGS)) {
        CreateVkImageViews(plat_.aspectFlags, nullptr);
    } else if (plat_.imageView) {
        plat_.imageViewBase = plat_.imageView;
    }
    if (hwBuffer) {
        plat_.platformHwBuffer = hwBuffer;
        CreatePlatformHwBuffer();
    }
}

GpuImageVk::~GpuImageVk()
{
    auto destroyImageViews = [](VkDevice device, auto& vec) {
        for (auto& ref : vec) {
            vkDestroyImageView(device, // device
                ref,                   // imageView
                nullptr);              // pAllocator
        }
        vec.clear();
    };
    // high level view might own image views
    const VkDevice device = ((const DevicePlatformDataVk&)device_.GetPlatformData()).device;
    if (ownsResources_ || ownsImageViews_) {
        vkDestroyImageView(device, // device
            plat_.imageView,       // imageView
            nullptr);              // pAllocator
        if (destroyImageViewBase_) {
            vkDestroyImageView(device, // device
                plat_.imageViewBase,   // imageView
                nullptr);              // pAllocator
        }
        destroyImageViews(device, platViews_.mipImageViews);
        destroyImageViews(device, platViews_.layerImageViews);
        destroyImageViews(device, platViews_.mipImageAllLayerViews);
    }

    if (ownsImage_) {
#if (RENDER_PERF_ENABLED == 1)
        RecordAllocation(-static_cast<int64_t>(mem_.allocationInfo.size));
#endif
#if (RENDER_DEBUG_GPU_RESOURCE_IDS == 1)
        PLUGIN_LOG_E("gpu image id <: 0x%" PRIxPTR, (uintptr_t)plat_.image);
#endif
        PlatformGpuMemoryAllocator* gpuMemAllocator = device_.GetPlatformGpuMemoryAllocator();
        PLUGIN_ASSERT(gpuMemAllocator);
        if (gpuMemAllocator) {
            gpuMemAllocator->DestroyImage(plat_.image, mem_.allocation);
        }
    }
    if (plat_.platformHwBuffer) {
        DestroyPlatformHwBuffer();
    }
}

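// Creates the VkImage and allocates its backing memory through the platform GPU memory
// allocator (VMA); a custom memory pool is used when one is defined for the desc.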
void GpuImageVk::CreateVkImage()
{
    const VkImageCreateInfo imageCreateInfo {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,                // sType
        nullptr,                                            // pNext
        static_cast<VkImageCreateFlags>(desc_.createFlags), // flags
        plat_.type,                                         // imageType
        plat_.format,                                       // format
        plat_.extent,                                       // extent
        plat_.mipLevels,                                    // mipLevels
        plat_.arrayLayers,                                  // arrayLayers
        plat_.samples,                                      // samples
        plat_.tiling,                                       // tiling
        plat_.usage,                                        // usage
        VkSharingMode::VK_SHARING_MODE_EXCLUSIVE,           // sharingMode
        0,                                                  // queueFamilyIndexCount
        nullptr,                                            // pQueueFamilyIndices
        VkImageLayout::VK_IMAGE_LAYOUT_UNDEFINED,           // initialLayout
    };

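    // lazily allocated, host cached, and protected bits are treated as preferences only;
    // they are stripped from the required flags so allocation can still succeed without them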
    auto memoryPropertyFlags = static_cast<VkMemoryPropertyFlags>(desc_.memoryPropertyFlags);
    const VkMemoryPropertyFlags requiredFlags =
        (memoryPropertyFlags & (~(VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT |
                                     CORE_MEMORY_PROPERTY_PROTECTED_BIT)));
    const VkMemoryPropertyFlags preferredFlags = memoryPropertyFlags;

    PlatformGpuMemoryAllocator* gpuMemAllocator = device_.GetPlatformGpuMemoryAllocator();
    if (gpuMemAllocator) {
        // can be null handle -> default allocator
        const VmaPool customPool = gpuMemAllocator->GetImagePool(desc_);

        const VmaAllocationCreateInfo allocationCreateInfo {
            0, // flags
#ifdef USE_NEW_VMA
            VmaMemoryUsage::VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE, // usage
#else
            VmaMemoryUsage::VMA_MEMORY_USAGE_GPU_ONLY, // usage
#endif
            requiredFlags,  // requiredFlags
            preferredFlags, // preferredFlags
            0,              // memoryTypeBits
            customPool,     // pool
            nullptr,        // pUserData
#ifdef USE_NEW_VMA
            0.f, // priority
#endif
        };

        gpuMemAllocator->CreateImage(
            imageCreateInfo, allocationCreateInfo, plat_.image, mem_.allocation, mem_.allocationInfo);
    }

    plat_.memory = GetPlatMemory(mem_.allocationInfo, preferredFlags);
}

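// Creates the default shader-facing view plus, when required, an identity-swizzle base
// view and per-mip/per-layer views used for attachments and storage bindings.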
void GpuImageVk::CreateVkImageViews(
    VkImageAspectFlags imageAspectFlags, const VkSamplerYcbcrConversionInfo* ycbcrConversionInfo)
{
    PLUGIN_ASSERT(plat_.image);
    const VkDevice vkDevice = ((const DevicePlatformDataVk&)device_.GetPlatformData()).device;

    const auto imageViewType = (VkImageViewType)desc_.imageViewType;
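    // a sampled view of a combined depth/stencil image may only select a single aspect,
    // so the stencil bit is dropped from the view used for shader sampling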
    const VkImageAspectFlags shaderViewImageAspectFlags = imageAspectFlags & (~VK_IMAGE_ASPECT_STENCIL_BIT);

    const VkComponentMapping componentMapping = {
        (VkComponentSwizzle)desc_.componentMapping.r,
        (VkComponentSwizzle)desc_.componentMapping.g,
        (VkComponentSwizzle)desc_.componentMapping.b,
        (VkComponentSwizzle)desc_.componentMapping.a,
    };

    const ImageInputStruct imageInput = { plat_.image, plat_.format, componentMapping };
    // Create basic image view for sampling and general usage
    plat_.imageView = CreateImageView(vkDevice, ycbcrConversionInfo, imageInput, imageViewType,
        shaderViewImageAspectFlags, 0, plat_.mipLevels, 0, plat_.arrayLayers);
    plat_.imageViewBase = plat_.imageView;

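    // attachment usage needs single-mip, single-layer, identity-swizzle views;
    // create those separately whenever the default view above does not qualify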
    const bool invalidFboSwizzle = InvalidFboSwizzle(componentMapping);
    const bool notValidImageViewForAttachment = (plat_.mipLevels > 1) || (plat_.arrayLayers > 1) || invalidFboSwizzle;
    const bool usageNeedsViews = (plat_.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
                                     VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) > 0;
    const bool separateViewNeeded = (imageAspectFlags != shaderViewImageAspectFlags);
    if (separateViewNeeded || (usageNeedsViews && notValidImageViewForAttachment)) {
        destroyImageViewBase_ = true;
        const VkImageViewType baseImageViewType = GetBaseImageViewType(imageViewType);
        {
            ImageInputStruct imageInputIdentity = imageInput;
            imageInputIdentity.componentMapping = {}; // identity needed for fbo
            plat_.imageViewBase = CreateImageView(
                vkDevice, ycbcrConversionInfo, imageInputIdentity, baseImageViewType, imageAspectFlags, 0U, 1U, 0U, 1U);
        }

        if (plat_.mipLevels > 1) {
            platViews_.mipImageViews.resize(plat_.mipLevels);
            if (plat_.arrayLayers > 1U) {
                platViews_.mipImageAllLayerViews.resize(plat_.mipLevels);
            }
            for (uint32_t mipIdx = 0; mipIdx < plat_.mipLevels; ++mipIdx) {
                platViews_.mipImageViews[mipIdx] = CreateImageView(
                    vkDevice, ycbcrConversionInfo, imageInput, baseImageViewType, imageAspectFlags, mipIdx, 1U, 0U, 1U);
                if (plat_.arrayLayers > 1U) {
                    platViews_.mipImageAllLayerViews[mipIdx] = CreateImageView(vkDevice, ycbcrConversionInfo,
                        imageInput, VK_IMAGE_VIEW_TYPE_2D_ARRAY, imageAspectFlags, mipIdx, 1U, 0U, plat_.arrayLayers);
                }
            }
        }
        if (plat_.arrayLayers > 1) {
            platViews_.layerImageViews.resize(plat_.arrayLayers);
            for (uint32_t layerIdx = 0; layerIdx < plat_.arrayLayers; ++layerIdx) {
                platViews_.layerImageViews[layerIdx] = CreateImageView(vkDevice, ycbcrConversionInfo, imageInput,
                    baseImageViewType, imageAspectFlags, 0U, 1U, layerIdx, 1U);
            }
        }
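        // cube maps additionally get per-mip views covering all faces (as 2D arrays),
        // unless they were already created above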
        if (imageViewType == VK_IMAGE_VIEW_TYPE_CUBE) {
            if (platViews_.mipImageAllLayerViews.empty()) {
                platViews_.mipImageAllLayerViews.resize(plat_.mipLevels);
                for (uint32_t mipIdx = 0; mipIdx < plat_.mipLevels; ++mipIdx) {
                    platViews_.mipImageAllLayerViews[mipIdx] = CreateImageView(vkDevice, ycbcrConversionInfo,
                        imageInput, VK_IMAGE_VIEW_TYPE_2D_ARRAY, imageAspectFlags, mipIdx, 1U, 0U, plat_.arrayLayers);
                }
            }
        }
    }
}

const GpuImageDesc& GpuImageVk::GetDesc() const
{
    return desc_;
}

const GpuImagePlatformData& GpuImageVk::GetBasePlatformData() const
{
    return plat_;
}

const GpuImagePlatformDataVk& GpuImageVk::GetPlatformData() const
{
    return plat_;
}

const GpuImagePlatformDataViewsVk& GpuImageVk::GetPlatformDataViews() const
{
    return platViews_;
}

const GpuImagePlatformDataConversion& GpuImageVk::GetPlaformDataConversion() const
{
    return platConversion_;
}

GpuImage::AdditionalFlags GpuImageVk::GetAdditionalFlags() const
{
    return (platConversion_.samplerConversion) ? ADDITIONAL_PLATFORM_CONVERSION_BIT : 0u;
}

namespace GpuImageUtilsVk {
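// Derives the image aspect flags from the format: depth and/or stencil bits for
// depth/stencil formats, otherwise the color bit.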
VkImageAspectFlags GetImageAspectFlagsFromFormat(const VkFormat format)
{
    VkImageAspectFlags flags {};

    const bool isDepthFormat =
        ((format == VkFormat::VK_FORMAT_D16_UNORM) || (format == VkFormat::VK_FORMAT_X8_D24_UNORM_PACK32) ||
            (format == VkFormat::VK_FORMAT_D32_SFLOAT) || (format == VkFormat::VK_FORMAT_D16_UNORM_S8_UINT) ||
            (format == VkFormat::VK_FORMAT_D24_UNORM_S8_UINT) || (format == VkFormat::VK_FORMAT_D32_SFLOAT_S8_UINT));
    if (isDepthFormat) {
        flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_DEPTH_BIT;

        const bool isStencilFormat =
            ((format == VkFormat::VK_FORMAT_S8_UINT) || (format == VkFormat::VK_FORMAT_D16_UNORM_S8_UINT) ||
                (format == VkFormat::VK_FORMAT_D24_UNORM_S8_UINT) ||
                (format == VkFormat::VK_FORMAT_D32_SFLOAT_S8_UINT));
        if (isStencilFormat) {
            flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    } else if (format == VkFormat::VK_FORMAT_S8_UINT) {
        flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_STENCIL_BIT;
    } else {
        flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_COLOR_BIT;
    }

    return flags;
}
} // namespace GpuImageUtilsVk
RENDER_END_NAMESPACE()