/*
 * Copyright (c) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "gpu_image_vk.h"

#include <cinttypes>
#include <vulkan/vulkan_core.h>

#include <base/math/mathf.h>

#if (RENDER_PERF_ENABLED == 1)
#include <core/implementation_uids.h>
#include <core/perf/intf_performance_data_manager.h>
#endif

#include <render/namespace.h>

#include "device/device.h"
#include "device/gpu_resource_desc_flag_validation.h"
#include "util/log.h"
#include "vulkan/device_vk.h"
#include "vulkan/validate_vk.h"

RENDER_BEGIN_NAMESPACE()
namespace {
#if (RENDER_PERF_ENABLED == 1)
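// Reports a GPU image allocation to the "Memory" performance data manager;
// the destructor passes a negative byte size to record the matching deallocation.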
void RecordAllocation(const int64_t alignedByteSize)
{
    if (auto* inst = CORE_NS::GetInstance<CORE_NS::IPerformanceDataManagerFactory>(CORE_NS::UID_PERFORMANCE_FACTORY);
        inst) {
        CORE_NS::IPerformanceDataManager* pdm = inst->Get("Memory");
        pdm->UpdateData("AllGpuImages", "GPU_IMAGE", alignedByteSize);
    }
}
#endif

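// Validation-only helper: logs an error if the format's optimal tiling features do not
// cover the requested depth/stencil attachment, color attachment, or storage usage.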
#if (RENDER_VALIDATION_ENABLED == 1)
void ValidateFormat(const DevicePlatformDataVk& devicePlat, const GpuImageDesc& desc)
{
    const VkFormat format = (VkFormat)desc.format;
    VkFormatProperties formatProperties;
    vkGetPhysicalDeviceFormatProperties(devicePlat.physicalDevice, // physicalDevice
        format, // format
        &formatProperties); // pFormatProperties
    const VkFormatFeatureFlags optimalTilingFeatureFlags = formatProperties.optimalTilingFeatures;
    bool valid = true;
    if (desc.usageFlags & ImageUsageFlagBits::CORE_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
        if ((optimalTilingFeatureFlags & VkFormatFeatureFlagBits::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) ==
            0) {
            valid = false;
        }
    }
    if (desc.usageFlags & ImageUsageFlagBits::CORE_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
        if ((optimalTilingFeatureFlags & VkFormatFeatureFlagBits::VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) == 0) {
            valid = false;
        }
    }
    if (desc.usageFlags & ImageUsageFlagBits::CORE_IMAGE_USAGE_STORAGE_BIT) {
        if ((optimalTilingFeatureFlags & VkFormatFeatureFlagBits::VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) == 0) {
            valid = false;
        }
    }

    if (!valid) {
        PLUGIN_LOG_E("Unsupported image format feature flags (CORE_FORMAT: %u)", desc.format);
    }
}
#endif

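// Usage flags that imply the image is accessed through a VkImageView;
// views are only created when at least one of these bits is requested.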
constexpr uint32_t IMAGE_VIEW_USAGE_FLAGS {
    CORE_IMAGE_USAGE_SAMPLED_BIT | CORE_IMAGE_USAGE_STORAGE_BIT | CORE_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
    CORE_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | CORE_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
    CORE_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | CORE_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT
};

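// Returns the format unchanged if the device supports it as a depth format; otherwise
// falls back to the first supported depth format, or BASE_FORMAT_UNDEFINED if none exist.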
BASE_NS::Format CheckDepthFormat(const DeviceVk& deviceVk, const BASE_NS::Format format)
{
    const auto& devPlat = deviceVk.GetPlatformInternalDataVk();
    for (const auto& supportedDepthFormat : devPlat.supportedDepthFormats) {
        if (format == supportedDepthFormat) {
            return format;
        }
    }
    if (!devPlat.supportedDepthFormats.empty()) {
#if (RENDER_VALIDATION_ENABLED == 1)
        PLUGIN_LOG_W("RENDER_VALIDATION: unsupported depth format (%u), using format (%u)", format,
            devPlat.supportedDepthFormats[0]);
#endif
        return devPlat.supportedDepthFormats[0];
    } else {
        return BASE_NS::Format::BASE_FORMAT_UNDEFINED;
    }
}

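// Maps array and cube view types to the single-subresource type used for the
// attachment-style "base" view (e.g. a single cube face is viewed as 2D).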
inline VkImageViewType GetBaseImageViewType(const VkImageViewType imageViewType)
{
    if (imageViewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) {
        return VK_IMAGE_VIEW_TYPE_1D;
    } else if (imageViewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY) {
        return VK_IMAGE_VIEW_TYPE_2D;
    } else if (imageViewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) {
        return VK_IMAGE_VIEW_TYPE_CUBE;
    } else if (imageViewType == VK_IMAGE_VIEW_TYPE_CUBE) {
        return VK_IMAGE_VIEW_TYPE_2D;
    }
    return imageViewType;
}

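// Translates VMA allocation results into the engine's GpuResourceMemoryVk description.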
inline GpuResourceMemoryVk GetPlatMemory(const VmaAllocationInfo& allocationInfo, const VkMemoryPropertyFlags flags)
{
    return GpuResourceMemoryVk { allocationInfo.deviceMemory, allocationInfo.offset, allocationInfo.size,
        allocationInfo.pMappedData, allocationInfo.memoryType, flags };
}

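// Framebuffer attachments require identity component swizzles; a non-identity
// mapping forces a separate identity-swizzled view for attachment use.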
inline bool InvalidFboSwizzle(const VkComponentMapping& componentMapping)
{
    return ((componentMapping.r != VK_COMPONENT_SWIZZLE_IDENTITY) ||
            (componentMapping.g != VK_COMPONENT_SWIZZLE_IDENTITY) ||
            (componentMapping.b != VK_COMPONENT_SWIZZLE_IDENTITY) ||
            (componentMapping.a != VK_COMPONENT_SWIZZLE_IDENTITY));
}

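// Copies the engine-side GpuImageDesc fields into their Vulkan equivalents.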
void FillImageDescVk(const GpuImageDesc& desc, GpuImagePlatformDataVk& plat)
{
    plat.format = static_cast<VkFormat>(desc.format);
    plat.aspectFlags = GpuImageUtilsVk::GetImageAspectFlagsFromFormat(plat.format);
    plat.usage = static_cast<VkImageUsageFlags>(desc.usageFlags);
    plat.extent = { desc.width, desc.height, desc.depth };
    plat.tiling = static_cast<VkImageTiling>(desc.imageTiling);
    plat.type = static_cast<VkImageType>(desc.imageType);
    plat.samples = static_cast<VkSampleCountFlagBits>(desc.sampleCountFlags);
    plat.mipLevels = desc.mipCount;
    plat.arrayLayers = desc.layerCount;
}

struct ImageInputStruct {
    VkImage image { VK_NULL_HANDLE };
    VkFormat format { VK_FORMAT_UNDEFINED };
    VkComponentMapping componentMapping {};
};

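// Thin wrapper over vkCreateImageView for a single subresource range; the optional
// ycbcrConversionInfo is chained via pNext for sampler YCbCr conversion views.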
VkImageView CreateImageView(const VkDevice device, const VkSamplerYcbcrConversionInfo* ycbcrConversionInfo,
    const ImageInputStruct& imageInput, const VkImageViewType imageViewType, const VkImageAspectFlags imageAspectFlags,
    const uint32_t baseMipLevel, const uint32_t levelCount, const uint32_t baseArrayLayer, const uint32_t layerCount)
{
    const VkImageSubresourceRange imageSubresourceRange {
        imageAspectFlags, // aspectMask
        baseMipLevel, // baseMipLevel
        levelCount, // levelCount
        baseArrayLayer, // baseArrayLayer
        layerCount // layerCount
    };

    const VkImageViewCreateInfo imageViewCreateInfo {
        VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, // sType
        ycbcrConversionInfo, // pNext
        0, // flags
        imageInput.image, // image
        imageViewType, // viewType
        imageInput.format, // format
        imageInput.componentMapping, // components
        imageSubresourceRange, // subresourceRange
    };

    VkImageView imageView = VK_NULL_HANDLE;
    VALIDATE_VK_RESULT(vkCreateImageView(device, // device
        &imageViewCreateInfo, // pCreateInfo
        nullptr, // pAllocator
        &imageView)); // pView

    return imageView;
}
} // namespace

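// Owning constructor: allocates the VkImage through the platform memory allocator and
// creates the default image views when the usage flags require them. A minimal usage
// sketch (CORE_IMAGE_TYPE_2D and the format value are assumptions based on the
// engine's CORE_*/BASE_* naming; only names used in this file are certain):
//   GpuImageDesc desc {};
//   desc.imageType = CORE_IMAGE_TYPE_2D;
//   desc.format = BASE_NS::BASE_FORMAT_R8G8B8A8_UNORM;
//   desc.width = 256U; desc.height = 256U; desc.depth = 1U;
//   desc.mipCount = 1U; desc.layerCount = 1U;
//   desc.usageFlags = CORE_IMAGE_USAGE_SAMPLED_BIT | CORE_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
//   desc.memoryPropertyFlags = CORE_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
//   GpuImageVk image(device, desc);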
GpuImageVk::GpuImageVk(Device& device, const GpuImageDesc& desc) : GpuImage(), device_(device), desc_(desc)
{
    PLUGIN_ASSERT_MSG(desc_.memoryPropertyFlags & MemoryPropertyFlagBits::CORE_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
        "Device local memory is the only memory property supported for Vulkan GpuImage (flags: %u)",
        desc_.memoryPropertyFlags);

    FillImageDescVk(desc_, plat_);
    if (plat_.aspectFlags & VK_IMAGE_ASPECT_DEPTH_BIT) {
        desc_.format = CheckDepthFormat((const DeviceVk&)device_, desc_.format);
        if (desc_.format != desc.format) {
            plat_.format = static_cast<VkFormat>(desc_.format);
            plat_.aspectFlags = GpuImageUtilsVk::GetImageAspectFlagsFromFormat(plat_.format);
        }
    }
#if (RENDER_VALIDATION_ENABLED == 1)
    ValidateFormat((const DevicePlatformDataVk&)device_.GetPlatformData(), desc_);
#endif

    CreateVkImage();
    if ((desc_.usageFlags & IMAGE_VIEW_USAGE_FLAGS) && plat_.image) {
        CreateVkImageViews(plat_.aspectFlags, nullptr);
    }

#if (RENDER_PERF_ENABLED == 1)
    RecordAllocation(static_cast<int64_t>(mem_.allocationInfo.size));
#endif

#if (RENDER_DEBUG_GPU_RESOURCE_IDS == 1)
    PLUGIN_LOG_E("gpu image id >: 0x%" PRIxPTR, (uintptr_t)plat_.image);
#endif
}

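// Non-owning constructor: wraps an externally created image (e.g. a swapchain image or
// a platform hardware buffer). Missing views are created here, and the hwBuffer variant
// additionally creates platform resources that the destructor releases.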
GpuImageVk::GpuImageVk(
    Device& device, const GpuImageDesc& desc, const GpuImagePlatformData& platformData, const uintptr_t hwBuffer)
    : device_(device), plat_((const GpuImagePlatformDataVk&)platformData),
      desc_(hwBuffer ? GetImageDescFromHwBufferDesc(hwBuffer) : desc), hwBuffer_(hwBuffer), ownsResources_(false)
{
#if (RENDER_VALIDATION_ENABLED == 1)
    if (!plat_.image && !plat_.imageView && (hwBuffer == 0)) {
        PLUGIN_LOG_W("RENDER_VALIDATION: creating GpuImage without image, imageView, or hwBuffer");
    }
#endif
    FillImageDescVk(desc_, plat_);
    if (plat_.image && !plat_.imageView && (desc_.usageFlags & IMAGE_VIEW_USAGE_FLAGS)) {
        CreateVkImageViews(plat_.aspectFlags, nullptr);
    } else if (plat_.imageView) {
        plat_.imageViewBase = plat_.imageView;
    }
    if (hwBuffer) {
        CreatePlatformHwBuffer();
    }
}

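// Destroys every image view this instance created; the VkImage and its memory are
// released only when owned (i.e. created by the owning constructor).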
GpuImageVk::~GpuImageVk()
{
    auto destroyImageViews = [](VkDevice device, auto& vec) {
        for (auto& ref : vec) {
            vkDestroyImageView(device, // device
                ref, // imageView
                nullptr); // pAllocator
        }
        vec.clear();
    };
    // hw buffer variant creates image views and needs to destroy those as well
    const VkDevice device = ((const DevicePlatformDataVk&)device_.GetPlatformData()).device;
    if (ownsResources_ || (hwBuffer_ != 0)) {
        vkDestroyImageView(device, // device
            plat_.imageView, // imageView
            nullptr); // pAllocator
        if (destroyImageViewBase_) {
            vkDestroyImageView(device, // device
                plat_.imageViewBase, // imageView
                nullptr); // pAllocator
        }
        destroyImageViews(device, platViews_.mipImageViews);
        destroyImageViews(device, platViews_.layerImageViews);
        destroyImageViews(device, platViews_.mipImageAllLayerViews);
    }

    if (ownsResources_) {
#if (RENDER_PERF_ENABLED == 1)
        RecordAllocation(-static_cast<int64_t>(mem_.allocationInfo.size));
#endif
#if (RENDER_DEBUG_GPU_RESOURCE_IDS == 1)
        PLUGIN_LOG_E("gpu image id <: 0x%" PRIxPTR, (uintptr_t)plat_.image);
#endif
        PlatformGpuMemoryAllocator* gpuMemAllocator = device_.GetPlatformGpuMemoryAllocator();
        PLUGIN_ASSERT(gpuMemAllocator);
        if (gpuMemAllocator) {
            gpuMemAllocator->DestroyImage(plat_.image, mem_.allocation);
        }
    } else if (hwBuffer_ != 0) {
        DestroyPlatformHwBuffer();
    }
}

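// Creates the VkImage through VMA. Lazily allocated, host cached, and protected memory
// bits are demoted from required to preferred so allocation can still succeed on
// devices that lack those memory types.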
void GpuImageVk::CreateVkImage()
{
    const VkImageCreateInfo imageCreateInfo {
        VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // sType
        nullptr, // pNext
        static_cast<VkImageCreateFlags>(desc_.createFlags), // flags
        plat_.type, // imageType
        plat_.format, // format
        plat_.extent, // extent
        plat_.mipLevels, // mipLevels
        plat_.arrayLayers, // arrayLayers
        plat_.samples, // samples
        plat_.tiling, // tiling
        plat_.usage, // usage
        VkSharingMode::VK_SHARING_MODE_EXCLUSIVE, // sharingMode
        0, // queueFamilyIndexCount
        nullptr, // pQueueFamilyIndices
        VkImageLayout::VK_IMAGE_LAYOUT_UNDEFINED, // initialLayout
    };

    const VkMemoryPropertyFlags memoryPropertyFlags = static_cast<VkMemoryPropertyFlags>(desc_.memoryPropertyFlags);
    const VkMemoryPropertyFlags requiredFlags =
        (memoryPropertyFlags & (~(VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT |
            CORE_MEMORY_PROPERTY_PROTECTED_BIT)));
    const VkMemoryPropertyFlags preferredFlags = memoryPropertyFlags;

    PlatformGpuMemoryAllocator* gpuMemAllocator = device_.GetPlatformGpuMemoryAllocator();
    if (gpuMemAllocator) {
        // can be null handle -> default allocator
        const VmaPool customPool = gpuMemAllocator->GetImagePool(desc_);

        const VmaAllocationCreateInfo allocationCreateInfo {
            0, // flags
#ifdef USE_NEW_VMA
            VmaMemoryUsage::VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE, // usage
#else
            VmaMemoryUsage::VMA_MEMORY_USAGE_GPU_ONLY, // usage
#endif
            requiredFlags, // requiredFlags
            preferredFlags, // preferredFlags
            0, // memoryTypeBits
            customPool, // pool
            nullptr, // pUserData
#ifdef USE_NEW_VMA
            0.f, // priority
#endif
        };

        gpuMemAllocator->CreateImage(
            imageCreateInfo, allocationCreateInfo, plat_.image, mem_.allocation, mem_.allocationInfo);
    }

    plat_.memory = GetPlatMemory(mem_.allocationInfo, preferredFlags);
}

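// Creates the default shader-facing view and, when attachment or storage usage is
// combined with a multi-mip, multi-layer, or swizzled image, additional
// identity-swizzled views. For example, a color-attachment cube map with N mips ends
// up with (a sketch of the resulting layout):
//   plat_.imageView                     - CUBE view over all mips and layers
//   plat_.imageViewBase                 - identity 2D view of mip 0 / layer 0
//   platViews_.mipImageViews[m]         - 2D view of mip m / layer 0
//   platViews_.layerImageViews[l]       - 2D view of mip 0 / layer l
//   platViews_.mipImageAllLayerViews[m] - 2D_ARRAY view of mip m over all 6 faces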
void GpuImageVk::CreateVkImageViews(
    VkImageAspectFlags imageAspectFlags, const VkSamplerYcbcrConversionInfo* ycbcrConversionInfo)
{
    PLUGIN_ASSERT(plat_.image);
    const VkDevice vkDevice = ((const DevicePlatformDataVk&)device_.GetPlatformData()).device;

    const VkImageViewType imageViewType = (VkImageViewType)desc_.imageViewType;
    const VkImageAspectFlags shaderViewImageAspectFlags = imageAspectFlags & (~VK_IMAGE_ASPECT_STENCIL_BIT);

    const VkComponentMapping componentMapping = {
        (VkComponentSwizzle)desc_.componentMapping.r,
        (VkComponentSwizzle)desc_.componentMapping.g,
        (VkComponentSwizzle)desc_.componentMapping.b,
        (VkComponentSwizzle)desc_.componentMapping.a,
    };

    const ImageInputStruct imageInput = { plat_.image, plat_.format, componentMapping };
    // Create basic image view for sampling and general usage
    plat_.imageView = CreateImageView(vkDevice, ycbcrConversionInfo, imageInput, imageViewType,
        shaderViewImageAspectFlags, 0, plat_.mipLevels, 0, plat_.arrayLayers);
    plat_.imageViewBase = plat_.imageView;

    const bool invalidFboSwizzle = InvalidFboSwizzle(componentMapping);
    const bool notValidImageViewForAttachment = (plat_.mipLevels > 1) || (plat_.arrayLayers > 1) || invalidFboSwizzle;
    const bool usageNeedsViews = (plat_.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
                                     VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) > 0;
    const bool separateViewNeeded = (imageAspectFlags != shaderViewImageAspectFlags);
    if (separateViewNeeded || (usageNeedsViews && notValidImageViewForAttachment)) {
        destroyImageViewBase_ = true;
        const VkImageViewType baseImageViewType = GetBaseImageViewType(imageViewType);
        {
            ImageInputStruct imageInputIdentity = imageInput;
            imageInputIdentity.componentMapping = {}; // identity needed for fbo
            plat_.imageViewBase = CreateImageView(
                vkDevice, ycbcrConversionInfo, imageInputIdentity, baseImageViewType, imageAspectFlags, 0U, 1U, 0U, 1U);
        }

        if (plat_.mipLevels > 1) {
            platViews_.mipImageViews.resize(plat_.mipLevels);
            if (plat_.arrayLayers > 1U) {
                platViews_.mipImageAllLayerViews.resize(plat_.mipLevels);
            }
            for (uint32_t mipIdx = 0; mipIdx < plat_.mipLevels; ++mipIdx) {
                platViews_.mipImageViews[mipIdx] = CreateImageView(
                    vkDevice, ycbcrConversionInfo, imageInput, baseImageViewType, imageAspectFlags, mipIdx, 1U, 0U, 1U);
                if (plat_.arrayLayers > 1U) {
                    platViews_.mipImageAllLayerViews[mipIdx] = CreateImageView(vkDevice, ycbcrConversionInfo,
                        imageInput, VK_IMAGE_VIEW_TYPE_2D_ARRAY, imageAspectFlags, mipIdx, 1U, 0U, plat_.arrayLayers);
                }
            }
        }
        if (plat_.arrayLayers > 1) {
            platViews_.layerImageViews.resize(plat_.arrayLayers);
            for (uint32_t layerIdx = 0; layerIdx < plat_.arrayLayers; ++layerIdx) {
                platViews_.layerImageViews[layerIdx] = CreateImageView(vkDevice, ycbcrConversionInfo, imageInput,
                    baseImageViewType, imageAspectFlags, 0U, 1U, layerIdx, 1U);
            }
        }
        if (imageViewType == VK_IMAGE_VIEW_TYPE_CUBE) {
            if (platViews_.mipImageAllLayerViews.empty()) {
                platViews_.mipImageAllLayerViews.resize(plat_.mipLevels);
                for (uint32_t mipIdx = 0; mipIdx < plat_.mipLevels; ++mipIdx) {
                    platViews_.mipImageAllLayerViews[mipIdx] = CreateImageView(vkDevice, ycbcrConversionInfo,
                        imageInput, VK_IMAGE_VIEW_TYPE_2D_ARRAY, imageAspectFlags, mipIdx, 1U, 0U, plat_.arrayLayers);
                }
            }
        }
    }
}

const GpuImageDesc& GpuImageVk::GetDesc() const
{
    return desc_;
}

const GpuImagePlatformData& GpuImageVk::GetBasePlatformData() const
{
    return plat_;
}

const GpuImagePlatformDataVk& GpuImageVk::GetPlatformData() const
{
    return plat_;
}

const GpuImagePlatformDataViewsVk& GpuImageVk::GetPlatformDataViews() const
{
    return platViews_;
}

const GpuImagePlatformDataConversion& GpuImageVk::GetPlaformDataConversion() const
{
    return platConversion_;
}

GpuImage::AdditionalFlags GpuImageVk::GetAdditionalFlags() const
{
    return (platConversion_.samplerConversion) ? ADDITIONAL_PLATFORM_CONVERSION_BIT : 0u;
}

namespace GpuImageUtilsVk {
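// Derives the aspect mask (color, depth, and/or stencil) from a VkFormat.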
VkImageAspectFlags GetImageAspectFlagsFromFormat(const VkFormat format)
{
    VkImageAspectFlags flags {};

    const bool isDepthFormat =
        (format == VkFormat::VK_FORMAT_D16_UNORM) || (format == VkFormat::VK_FORMAT_X8_D24_UNORM_PACK32) ||
        (format == VkFormat::VK_FORMAT_D32_SFLOAT) || (format == VkFormat::VK_FORMAT_D16_UNORM_S8_UINT) ||
        (format == VkFormat::VK_FORMAT_D24_UNORM_S8_UINT) || (format == VkFormat::VK_FORMAT_D32_SFLOAT_S8_UINT);
    if (isDepthFormat) {
        flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_DEPTH_BIT;

        const bool isStencilFormat =
            (format == VkFormat::VK_FORMAT_D16_UNORM_S8_UINT) || (format == VkFormat::VK_FORMAT_D24_UNORM_S8_UINT) ||
            (format == VkFormat::VK_FORMAT_D32_SFLOAT_S8_UINT);
        if (isStencilFormat) {
            flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    } else if (format == VkFormat::VK_FORMAT_S8_UINT) {
        flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_STENCIL_BIT;
    } else {
        flags |= VkImageAspectFlagBits::VK_IMAGE_ASPECT_COLOR_BIT;
    }

    return flags;
}
} // namespace GpuImageUtilsVk
RENDER_END_NAMESPACE()