/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 */

// Allow use of STL min and max functions in Windows
#define NOMINMAX

#include <cmath>
#include <set>
#include <sstream>
#include <string>

#include "vk_enum_string_helper.h"
#include "vk_layer_data.h"
#include "vk_layer_utils.h"
#include "vk_layer_logging.h"
#include "vk_typemap_helper.h"

#include "chassis.h"
#include "core_validation.h"
#include "shader_validation.h"
#include "descriptor_sets.h"
#include "buffer_validation.h"
42
uint32_t FullMipChainLevels(uint32_t height, uint32_t width, uint32_t depth) {
44 // uint cast applies floor()
45 return 1u + (uint32_t)log2(std::max({height, width, depth}));
46 }
47
uint32_t FullMipChainLevels(VkExtent3D extent) { return FullMipChainLevels(extent.height, extent.width, extent.depth); }
49
uint32_t FullMipChainLevels(VkExtent2D extent) { return FullMipChainLevels(extent.height, extent.width); }
51
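// Set the layout of a single image subresource in this command buffer's layout map, recording an initial layout on first use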
void CoreChecks::SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair,
                           const VkImageLayout &layout) {
54 auto it = pCB->imageLayoutMap.find(imgpair);
55 if (it != pCB->imageLayoutMap.end()) {
56 it->second.layout = layout;
57 } else {
58 assert(imgpair.hasSubresource);
59 IMAGE_CMD_BUF_LAYOUT_NODE node;
60 if (!FindCmdBufLayout(device_data, pCB, imgpair.image, imgpair.subresource, node)) {
61 node.initialLayout = layout;
62 }
63 SetLayout(device_data, pCB, imgpair, {node.initialLayout, layout});
64 }
65 }
66
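// Apply a layout assignment to every aspect present in the subresource's aspectMask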
67 template <class OBJECT, class LAYOUT>
void CoreChecks::SetLayout(layer_data *device_data, OBJECT *pObject, VkImage image, VkImageSubresource range,
                           const LAYOUT &layout) {
70 ImageSubresourcePair imgpair = {image, true, range};
71 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
72 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
73 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
74 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
75 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
76 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
77 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
78 SetLayout(device_data, pObject, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
79 }
80 }
81
82 template <class OBJECT, class LAYOUT>
void CoreChecks::SetLayout(layer_data *device_data, OBJECT *pObject, ImageSubresourcePair imgpair, const LAYOUT &layout,
                           VkImageAspectFlags aspectMask) {
85 if (imgpair.subresource.aspectMask & aspectMask) {
86 imgpair.subresource.aspectMask = aspectMask;
87 SetLayout(device_data, pObject, imgpair, layout);
88 }
89 }
90
91 // Set the layout in supplied map
void CoreChecks::SetLayout(std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap,
                           ImageSubresourcePair imgpair, VkImageLayout layout) {
94 auto it = imageLayoutMap.find(imgpair);
95 if (it != imageLayoutMap.end()) {
96 it->second.layout = layout; // Update
97 } else {
98 imageLayoutMap[imgpair].layout = layout; // Insert
99 }
100 }
101
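// Look up the command-buffer layout node for one aspect, reporting an error if aspects of the same subresource disagree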
bool CoreChecks::FindLayoutVerifyNode(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, ImageSubresourcePair imgpair,
                                      IMAGE_CMD_BUF_LAYOUT_NODE &node, const VkImageAspectFlags aspectMask) {
104 if (!(imgpair.subresource.aspectMask & aspectMask)) {
105 return false;
106 }
107 VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask;
108 imgpair.subresource.aspectMask = aspectMask;
109 auto imgsubIt = pCB->imageLayoutMap.find(imgpair);
110 if (imgsubIt == pCB->imageLayoutMap.end()) {
111 return false;
112 }
113 if (node.layout != VK_IMAGE_LAYOUT_MAX_ENUM && node.layout != imgsubIt->second.layout) {
114 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image),
115 kVUID_Core_DrawState_InvalidLayout,
116 "Cannot query for VkImage %s layout when combined aspect mask %d has multiple layout types: %s and %s",
117 report_data->FormatHandle(imgpair.image).c_str(), oldAspectMask, string_VkImageLayout(node.layout),
118 string_VkImageLayout(imgsubIt->second.layout));
119 }
120 if (node.initialLayout != VK_IMAGE_LAYOUT_MAX_ENUM && node.initialLayout != imgsubIt->second.initialLayout) {
121 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image),
122 kVUID_Core_DrawState_InvalidLayout,
123 "Cannot query for VkImage %s"
124 " layout when combined aspect mask %d has multiple initial layout types: %s and %s",
125 report_data->FormatHandle(imgpair.image).c_str(), oldAspectMask, string_VkImageLayout(node.initialLayout),
126 string_VkImageLayout(imgsubIt->second.initialLayout));
127 }
128 node = imgsubIt->second;
129 return true;
130 }
131
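// Look up the global layout for one aspect, reporting an error if aspects of the same subresource disagree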
bool CoreChecks::FindLayoutVerifyLayout(layer_data const *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout,
                                        const VkImageAspectFlags aspectMask) {
134 if (!(imgpair.subresource.aspectMask & aspectMask)) {
135 return false;
136 }
137 VkImageAspectFlags oldAspectMask = imgpair.subresource.aspectMask;
138 imgpair.subresource.aspectMask = aspectMask;
139 auto imgsubIt = (*GetImageLayoutMap()).find(imgpair);
140 if (imgsubIt == (*GetImageLayoutMap()).end()) {
141 return false;
142 }
143 if (layout != VK_IMAGE_LAYOUT_MAX_ENUM && layout != imgsubIt->second.layout) {
144 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(imgpair.image),
145 kVUID_Core_DrawState_InvalidLayout,
146 "Cannot query for VkImage %s layout when combined aspect mask %d has multiple layout types: %s and %s",
147 report_data->FormatHandle(imgpair.image).c_str(), oldAspectMask, string_VkImageLayout(layout),
148 string_VkImageLayout(imgsubIt->second.layout));
149 }
150 layout = imgsubIt->second.layout;
151 return true;
152 }
153
154 // Find layout(s) on the command buffer level
bool CoreChecks::FindCmdBufLayout(layer_data const *device_data, GLOBAL_CB_NODE const *pCB, VkImage image, VkImageSubresource range,
                                  IMAGE_CMD_BUF_LAYOUT_NODE &node) {
157 ImageSubresourcePair imgpair = {image, true, range};
158 node = IMAGE_CMD_BUF_LAYOUT_NODE(VK_IMAGE_LAYOUT_MAX_ENUM, VK_IMAGE_LAYOUT_MAX_ENUM);
159 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_COLOR_BIT);
160 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_DEPTH_BIT);
161 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_STENCIL_BIT);
162 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_METADATA_BIT);
163 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
164 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
165 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
166 FindLayoutVerifyNode(device_data, pCB, imgpair, node, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
167 }
168 if (node.layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
169 imgpair = {image, false, VkImageSubresource()};
170 auto imgsubIt = pCB->imageLayoutMap.find(imgpair);
171 if (imgsubIt == pCB->imageLayoutMap.end()) return false;
172 // TODO: This is ostensibly a find function but it changes state here
173 node = imgsubIt->second;
174 }
175 return true;
176 }
177
178 // Find layout(s) on the global level
bool CoreChecks::FindGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, VkImageLayout &layout) {
180 layout = VK_IMAGE_LAYOUT_MAX_ENUM;
181 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
182 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
183 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
184 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
185 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
186 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
187 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
188 FindLayoutVerifyLayout(device_data, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
189 }
190 if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
191 imgpair = {imgpair.image, false, VkImageSubresource()};
192 auto imgsubIt = (*GetImageLayoutMap()).find(imgpair);
193 if (imgsubIt == (*GetImageLayoutMap()).end()) return false;
194 layout = imgsubIt->second.layout;
195 }
196 return true;
197 }
198
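// Collect all layouts recorded globally for the subresources of the given image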
bool CoreChecks::FindLayouts(layer_data *device_data, VkImage image, std::vector<VkImageLayout> &layouts) {
200 auto sub_data = (*GetImageSubresourceMap()).find(image);
201 if (sub_data == (*GetImageSubresourceMap()).end()) return false;
202 auto image_state = GetImageState(image);
203 if (!image_state) return false;
204 bool ignoreGlobal = false;
205 // TODO: Make this robust for >1 aspect mask. Now it will just say ignore potential errors in this case.
206 if (sub_data->second.size() >= (image_state->createInfo.arrayLayers * image_state->createInfo.mipLevels + 1)) {
207 ignoreGlobal = true;
208 }
209 for (auto imgsubpair : sub_data->second) {
210 if (ignoreGlobal && !imgsubpair.hasSubresource) continue;
211 auto img_data = (*GetImageLayoutMap()).find(imgsubpair);
212 if (img_data != (*GetImageLayoutMap()).end()) {
213 layouts.push_back(img_data->second.layout);
214 }
215 }
216 return true;
217 }
218
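// Find the layout for a single aspect in the supplied map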
bool CoreChecks::FindLayout(const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap,
                            ImageSubresourcePair imgpair, VkImageLayout &layout, const VkImageAspectFlags aspectMask) {
221 if (!(imgpair.subresource.aspectMask & aspectMask)) {
222 return false;
223 }
224 imgpair.subresource.aspectMask = aspectMask;
225 auto imgsubIt = imageLayoutMap.find(imgpair);
226 if (imgsubIt == imageLayoutMap.end()) {
227 return false;
228 }
229 layout = imgsubIt->second.layout;
230 return true;
231 }
232
233 // find layout in supplied map
bool CoreChecks::FindLayout(layer_data *device_data,
                            const std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &imageLayoutMap,
                            ImageSubresourcePair imgpair, VkImageLayout &layout) {
237 layout = VK_IMAGE_LAYOUT_MAX_ENUM;
238 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_COLOR_BIT);
239 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_DEPTH_BIT);
240 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_STENCIL_BIT);
241 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
242 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
243 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
244 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
245 FindLayout(imageLayoutMap, imgpair, layout, VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
246 }
247 // Image+subresource not found, look for image handle w/o subresource
248 if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
249 imgpair = {imgpair.image, false, VkImageSubresource()};
250 auto imgsubIt = imageLayoutMap.find(imgpair);
251 if (imgsubIt == imageLayoutMap.end()) return false;
252 layout = imgsubIt->second.layout;
253 }
254 return true;
255 }
256
257 // Set the layout on the global level
void CoreChecks::SetGlobalLayout(layer_data *device_data, ImageSubresourcePair imgpair, const VkImageLayout &layout) {
259 VkImage &image = imgpair.image;
260 auto &lmap = (*GetImageLayoutMap());
261 auto data = lmap.find(imgpair);
262 if (data != lmap.end()) {
263 data->second.layout = layout; // Update
264 } else {
265 lmap[imgpair].layout = layout; // Insert
266 }
267 auto &image_subresources = (*GetImageSubresourceMap())[image];
268 auto subresource = std::find(image_subresources.begin(), image_subresources.end(), imgpair);
269 if (subresource == image_subresources.end()) {
270 image_subresources.push_back(imgpair);
271 }
272 }
273
274 // Set the layout on the cmdbuf level
void CoreChecks::SetLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair,
                           const IMAGE_CMD_BUF_LAYOUT_NODE &node) {
277 auto it = pCB->imageLayoutMap.find(imgpair);
278 if (it != pCB->imageLayoutMap.end()) {
279 it->second = node; // Update
280 } else {
281 pCB->imageLayoutMap[imgpair] = node; // Insert
282 }
283 }
284 // Set image layout for given VkImageSubresourceRange struct
void CoreChecks::SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state,
                                VkImageSubresourceRange image_subresource_range, const VkImageLayout &layout) {
287 assert(image_state);
288 cb_node->image_layout_change_count++; // Change the version of this data to force revalidation
289 for (uint32_t level_index = 0; level_index < image_subresource_range.levelCount; ++level_index) {
290 uint32_t level = image_subresource_range.baseMipLevel + level_index;
291 for (uint32_t layer_index = 0; layer_index < image_subresource_range.layerCount; layer_index++) {
292 uint32_t layer = image_subresource_range.baseArrayLayer + layer_index;
293 VkImageSubresource sub = {image_subresource_range.aspectMask, level, layer};
294 // TODO: If ImageView was created with depth or stencil, transition both layouts as the aspectMask is ignored and both
295 // are used. Verify that the extra implicit layout is OK for descriptor set layout validation
296 if (image_subresource_range.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
297 if (FormatIsDepthAndStencil(image_state->createInfo.format)) {
298 sub.aspectMask |= (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
299 }
300 }
301 // For multiplane images, IMAGE_ASPECT_COLOR is an alias for all of the plane bits
302 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
303 if (FormatIsMultiplane(image_state->createInfo.format)) {
304 if (sub.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
305 sub.aspectMask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
306 sub.aspectMask |= VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR;
307 if (FormatPlaneCount(image_state->createInfo.format) > 2) {
308 sub.aspectMask |= VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
309 }
310 }
311 }
312 }
313 SetLayout(device_data, cb_node, image_state->image, sub, layout);
314 }
315 }
316 }
317 // Set image layout for given VkImageSubresourceLayers struct
void CoreChecks::SetImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *image_state,
                                VkImageSubresourceLayers image_subresource_layers, const VkImageLayout &layout) {
320 // Transfer VkImageSubresourceLayers into VkImageSubresourceRange struct
321 VkImageSubresourceRange image_subresource_range;
322 image_subresource_range.aspectMask = image_subresource_layers.aspectMask;
323 image_subresource_range.baseArrayLayer = image_subresource_layers.baseArrayLayer;
324 image_subresource_range.layerCount = image_subresource_layers.layerCount;
325 image_subresource_range.baseMipLevel = image_subresource_layers.mipLevel;
326 image_subresource_range.levelCount = 1;
327 SetImageLayout(device_data, cb_node, image_state, image_subresource_range, layout);
328 }
329
330 // Set image layout for all slices of an image view
void CoreChecks::SetImageViewLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_VIEW_STATE *view_state,
                                    const VkImageLayout &layout) {
333 assert(view_state);
334
335 IMAGE_STATE *image_state = GetImageState(view_state->create_info.image);
336 VkImageSubresourceRange sub_range = view_state->create_info.subresourceRange;
337
    // When changing the layout of a 3D image subresource via a 2D or 2D_ARRAY image view, all depth slices of
339 // the subresource mip level(s) are transitioned, ignoring any layers restriction in the subresource info.
340 if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) && (view_state->create_info.viewType != VK_IMAGE_VIEW_TYPE_3D)) {
341 sub_range.baseArrayLayer = 0;
342 sub_range.layerCount = image_state->createInfo.extent.depth;
343 }
344
345 SetImageLayout(device_data, cb_node, image_state, sub_range, layout);
346 }
347
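// Set image layout for all subresources referenced by a VkImageView handle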
void CoreChecks::SetImageViewLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImageView imageView,
                                    const VkImageLayout &layout) {
350 auto view_state = GetImageViewState(imageView);
351 SetImageViewLayout(device_data, cb_node, view_state, layout);
352 }
353
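// Validate that an attachment's render pass layout is compatible with the usage flags of the framebuffer image it references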
bool CoreChecks::ValidateRenderPassLayoutAgainstFramebufferImageUsage(layer_data *device_data, RenderPassCreateVersion rp_version,
                                                                      VkImageLayout layout, VkImage image, VkImageView image_view,
                                                                      VkFramebuffer framebuffer, VkRenderPass renderpass,
                                                                      uint32_t attachment_index, const char *variable_name) {
358 bool skip = false;
359 auto image_state = GetImageState(image);
360 const char *vuid;
361 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
362
363 if (!image_state) {
364 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
365 "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
366 "Render Pass begin with renderpass %s uses framebuffer %s where pAttachments[%" PRIu32
367 "] = image view %s, which refers to an invalid image",
368 report_data->FormatHandle(renderpass).c_str(), report_data->FormatHandle(framebuffer).c_str(),
369 attachment_index, report_data->FormatHandle(image_view).c_str());
370 return skip;
371 }
372
373 auto image_usage = image_state->createInfo.usage;
374
375 // Check for layouts that mismatch image usages in the framebuffer
376 if (layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) {
377 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03094" : "VUID-vkCmdBeginRenderPass-initialLayout-00895";
378 skip |=
379 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
380 "Layout/usage mismatch for attachment %u in render pass %s"
381 " - the %s is %s but the image attached to framebuffer %s via image view %s"
382 " was not created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT",
                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
385 }
386
387 if (layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
388 !(image_usage & (VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT))) {
389 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03097" : "VUID-vkCmdBeginRenderPass-initialLayout-00897";
390 skip |=
391 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
392 "Layout/usage mismatch for attachment %u in render pass %s"
393 " - the %s is %s but the image attached to framebuffer %s via image view %s"
394 " was not created with VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT or VK_IMAGE_USAGE_SAMPLED_BIT",
                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
397 }
398
399 if (layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)) {
400 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03098" : "VUID-vkCmdBeginRenderPass-initialLayout-00898";
401 skip |=
402 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
403 "Layout/usage mismatch for attachment %u in render pass %s"
404 " - the %s is %s but the image attached to framebuffer %s via image view %s"
405 " was not created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT",
                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
408 }
409
410 if (layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && !(image_usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
411 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03099" : "VUID-vkCmdBeginRenderPass-initialLayout-00899";
412 skip |=
413 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
414 "Layout/usage mismatch for attachment %u in render pass %s"
415 " - the %s is %s but the image attached to framebuffer %s via image view %s"
416 " was not created with VK_IMAGE_USAGE_TRANSFER_DST_BIT",
                    attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
                    report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
419 }
420
421 if (GetDeviceExtensions()->vk_khr_maintenance2) {
422 if ((layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL ||
423 layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL ||
424 layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
425 layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) &&
426 !(image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
427 vuid = use_rp2 ? "VUID-vkCmdBeginRenderPass2KHR-initialLayout-03096" : "VUID-vkCmdBeginRenderPass-initialLayout-01758";
428 skip |= log_msg(
429 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image), vuid,
430 "Layout/usage mismatch for attachment %u in render pass %s"
431 " - the %s is %s but the image attached to framebuffer %s via image view %s"
432 " was not created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT",
                attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name, string_VkImageLayout(layout),
                report_data->FormatHandle(framebuffer).c_str(), report_data->FormatHandle(image_view).c_str());
435 }
436 } else {
437 // The create render pass 2 extension requires maintenance 2 (the previous branch), so no vuid switch needed here.
438 if ((layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
439 layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) &&
440 !(image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
441 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
442 HandleToUint64(image), "VUID-vkCmdBeginRenderPass-initialLayout-00896",
443 "Layout/usage mismatch for attachment %u in render pass %s"
444 " - the %s is %s but the image attached to framebuffer %s via image view %s"
445 " was not created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT",
                            attachment_index, report_data->FormatHandle(renderpass).c_str(), variable_name,
                            string_VkImageLayout(layout), report_data->FormatHandle(framebuffer).c_str(),
                            report_data->FormatHandle(image_view).c_str());
449 }
450 }
451 return skip;
452 }
453
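// At render pass begin, validate attachment layouts against previously recorded layouts and against framebuffer image usage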
bool CoreChecks::VerifyFramebufferAndRenderPassLayouts(layer_data *device_data, RenderPassCreateVersion rp_version,
                                                       GLOBAL_CB_NODE *pCB, const VkRenderPassBeginInfo *pRenderPassBegin,
                                                       const FRAMEBUFFER_STATE *framebuffer_state) {
457 bool skip = false;
458 auto const pRenderPassInfo = GetRenderPassState(pRenderPassBegin->renderPass)->createInfo.ptr();
459 auto const &framebufferInfo = framebuffer_state->createInfo;
460
461 auto render_pass = GetRenderPassState(pRenderPassBegin->renderPass)->renderPass;
462 auto framebuffer = framebuffer_state->framebuffer;
463
464 if (pRenderPassInfo->attachmentCount != framebufferInfo.attachmentCount) {
465 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
466 HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidRenderpass,
467 "You cannot start a render pass using a framebuffer with a different number of attachments.");
468 }
469 for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
470 const VkImageView &image_view = framebufferInfo.pAttachments[i];
471 auto view_state = GetImageViewState(image_view);
472
473 if (!view_state) {
474 skip |=
475 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
476 HandleToUint64(pRenderPassBegin->renderPass), "VUID-VkRenderPassBeginInfo-framebuffer-parameter",
477 "vkCmdBeginRenderPass(): framebuffer %s pAttachments[%" PRIu32 "] = %s is not a valid VkImageView handle",
478 report_data->FormatHandle(framebuffer_state->framebuffer).c_str(), i,
479 report_data->FormatHandle(image_view).c_str());
480 continue;
481 }
482
483 const VkImage &image = view_state->create_info.image;
484 const VkImageSubresourceRange &subRange = view_state->create_info.subresourceRange;
485 auto initial_layout = pRenderPassInfo->pAttachments[i].initialLayout;
486 auto final_layout = pRenderPassInfo->pAttachments[i].finalLayout;
487
488 // TODO: Do not iterate over every possibility - consolidate where possible
489 for (uint32_t j = 0; j < subRange.levelCount; j++) {
490 uint32_t level = subRange.baseMipLevel + j;
491 for (uint32_t k = 0; k < subRange.layerCount; k++) {
492 uint32_t layer = subRange.baseArrayLayer + k;
493 VkImageSubresource sub = {subRange.aspectMask, level, layer};
494 IMAGE_CMD_BUF_LAYOUT_NODE node;
495 if (!FindCmdBufLayout(device_data, pCB, image, sub, node)) {
496 // Missing layouts will be added during state update
497 continue;
498 }
499 if (initial_layout != VK_IMAGE_LAYOUT_UNDEFINED && initial_layout != node.layout) {
500 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
501 kVUID_Core_DrawState_InvalidRenderpass,
502 "You cannot start a render pass using attachment %u where the render pass initial layout is %s "
503 "and the previous known layout of the attachment is %s. The layouts must match, or the render "
504 "pass initial layout for the attachment must be VK_IMAGE_LAYOUT_UNDEFINED",
505 i, string_VkImageLayout(initial_layout), string_VkImageLayout(node.layout));
506 }
507 }
508 }
509
510 ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, initial_layout, image, image_view,
511 framebuffer, render_pass, i, "initial layout");
512
513 ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, final_layout, image, image_view, framebuffer,
514 render_pass, i, "final layout");
515 }
516
517 for (uint32_t j = 0; j < pRenderPassInfo->subpassCount; ++j) {
518 auto &subpass = pRenderPassInfo->pSubpasses[j];
519 for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].inputAttachmentCount; ++k) {
520 auto &attachment_ref = subpass.pInputAttachments[k];
521 if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
522 auto image_view = framebufferInfo.pAttachments[attachment_ref.attachment];
523 auto view_state = GetImageViewState(image_view);
524
525 if (view_state) {
526 auto image = view_state->create_info.image;
527 ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, attachment_ref.layout, image,
528 image_view, framebuffer, render_pass,
529 attachment_ref.attachment, "input attachment layout");
530 }
531 }
532 }
533
534 for (uint32_t k = 0; k < pRenderPassInfo->pSubpasses[j].colorAttachmentCount; ++k) {
535 auto &attachment_ref = subpass.pColorAttachments[k];
536 if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
537 auto image_view = framebufferInfo.pAttachments[attachment_ref.attachment];
538 auto view_state = GetImageViewState(image_view);
539
540 if (view_state) {
541 auto image = view_state->create_info.image;
542 ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, attachment_ref.layout, image,
543 image_view, framebuffer, render_pass,
544 attachment_ref.attachment, "color attachment layout");
545 if (subpass.pResolveAttachments) {
546 ValidateRenderPassLayoutAgainstFramebufferImageUsage(
547 device_data, rp_version, attachment_ref.layout, image, image_view, framebuffer, render_pass,
548 attachment_ref.attachment, "resolve attachment layout");
549 }
550 }
551 }
552 }
553
554 if (pRenderPassInfo->pSubpasses[j].pDepthStencilAttachment) {
555 auto &attachment_ref = *subpass.pDepthStencilAttachment;
556 if (attachment_ref.attachment != VK_ATTACHMENT_UNUSED) {
557 auto image_view = framebufferInfo.pAttachments[attachment_ref.attachment];
558 auto view_state = GetImageViewState(image_view);
559
560 if (view_state) {
561 auto image = view_state->create_info.image;
562 ValidateRenderPassLayoutAgainstFramebufferImageUsage(device_data, rp_version, attachment_ref.layout, image,
563 image_view, framebuffer, render_pass,
                                                                         attachment_ref.attachment, "depth/stencil attachment layout");
565 }
566 }
567 }
568 }
569 return skip;
570 }
571
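// Transition the image view referenced by an attachment reference to the layout named in that reference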
void CoreChecks::TransitionAttachmentRefLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, FRAMEBUFFER_STATE *pFramebuffer,
                                               const safe_VkAttachmentReference2KHR &ref) {
574 if (ref.attachment != VK_ATTACHMENT_UNUSED) {
575 auto image_view = GetAttachmentImageViewState(pFramebuffer, ref.attachment);
576 if (image_view) {
577 SetImageViewLayout(device_data, pCB, image_view, ref.layout);
578 }
579 }
580 }
581
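// Transition all attachments used by the given subpass to their per-subpass layouts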
void CoreChecks::TransitionSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB, const RENDER_PASS_STATE *render_pass_state,
                                          const int subpass_index, FRAMEBUFFER_STATE *framebuffer_state) {
584 assert(render_pass_state);
585
586 if (framebuffer_state) {
587 auto const &subpass = render_pass_state->createInfo.pSubpasses[subpass_index];
588 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
589 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pInputAttachments[j]);
590 }
591 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
592 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, subpass.pColorAttachments[j]);
593 }
594 if (subpass.pDepthStencilAttachment) {
595 TransitionAttachmentRefLayout(device_data, pCB, framebuffer_state, *subpass.pDepthStencilAttachment);
596 }
597 }
598 }
599
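// Verify that a barrier's oldLayout matches the layout currently recorded for a single aspect/level/layer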
bool CoreChecks::ValidateImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE const *pCB,
                                           const VkImageMemoryBarrier *mem_barrier, uint32_t level, uint32_t layer,
                                           VkImageAspectFlags aspect) {
603 if (!(mem_barrier->subresourceRange.aspectMask & aspect)) {
604 return false;
605 }
606 VkImageSubresource sub = {aspect, level, layer};
607 IMAGE_CMD_BUF_LAYOUT_NODE node;
608 if (!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, node)) {
609 return false;
610 }
611 bool skip = false;
612 if (mem_barrier->oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
613 // TODO: Set memory invalid which is in mem_tracker currently
614 } else if (node.layout != mem_barrier->oldLayout) {
615 skip = log_msg(
616 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
617 HandleToUint64(pCB->commandBuffer), "VUID-VkImageMemoryBarrier-oldLayout-01197",
618 "For image %s you cannot transition the layout of aspect=%d level=%d layer=%d from %s when current layout is %s.",
619 report_data->FormatHandle(mem_barrier->image).c_str(), aspect, level, layer,
620 string_VkImageLayout(mem_barrier->oldLayout), string_VkImageLayout(node.layout));
621 }
622 return skip;
623 }
624
625 // Transition the layout state for renderpass attachments based on the BeginRenderPass() call. This includes:
626 // 1. Transition into initialLayout state
627 // 2. Transition from initialLayout to layout used in subpass 0
void CoreChecks::TransitionBeginRenderPassLayouts(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
                                                  const RENDER_PASS_STATE *render_pass_state,
                                                  FRAMEBUFFER_STATE *framebuffer_state) {
631 // First transition into initialLayout
632 auto const rpci = render_pass_state->createInfo.ptr();
633 for (uint32_t i = 0; i < rpci->attachmentCount; ++i) {
634 auto view_state = GetAttachmentImageViewState(framebuffer_state, i);
635 if (view_state) {
636 SetImageViewLayout(device_data, cb_state, view_state, rpci->pAttachments[i].initialLayout);
637 }
638 }
639 // Now transition for first subpass (index 0)
640 TransitionSubpassLayouts(device_data, cb_state, render_pass_state, 0, framebuffer_state);
641 }
642
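// Record the layout transition performed by an image barrier for a single aspect/level/layer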
void CoreChecks::TransitionImageAspectLayout(layer_data *device_data, GLOBAL_CB_NODE *pCB, const VkImageMemoryBarrier *mem_barrier,
                                             uint32_t level, uint32_t layer, VkImageAspectFlags aspect_mask,
                                             VkImageAspectFlags aspect) {
646 if (!(aspect_mask & aspect)) {
647 return;
648 }
649 VkImageSubresource sub = {aspect, level, layer};
650 IMAGE_CMD_BUF_LAYOUT_NODE node;
651 if (!FindCmdBufLayout(device_data, pCB, mem_barrier->image, sub, node)) {
652 pCB->image_layout_change_count++; // Change the version of this data to force revalidation
653 SetLayout(device_data, pCB, mem_barrier->image, sub,
654 IMAGE_CMD_BUF_LAYOUT_NODE(mem_barrier->oldLayout, mem_barrier->newLayout));
655 return;
656 }
657 if (mem_barrier->oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
658 // TODO: Set memory invalid
659 }
660 SetLayout(device_data, pCB, mem_barrier->image, sub, mem_barrier->newLayout);
661 }
662
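// Returns true only if every aspect bit in aspect_mask is valid for the given format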
bool VerifyAspectsPresent(VkImageAspectFlags aspect_mask, VkFormat format) {
664 if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != 0) {
665 if (!(FormatIsColor(format) || FormatIsMultiplane(format))) return false;
666 }
667 if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != 0) {
668 if (!FormatHasDepth(format)) return false;
669 }
670 if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != 0) {
671 if (!FormatHasStencil(format)) return false;
672 }
673 if (0 !=
674 (aspect_mask & (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR))) {
675 if (FormatPlaneCount(format) == 1) return false;
676 }
677 return true;
678 }
679
680 // Verify an ImageMemoryBarrier's old/new ImageLayouts are compatible with the Image's ImageUsageFlags.
bool CoreChecks::ValidateBarrierLayoutToImageUsage(layer_data *device_data, const VkImageMemoryBarrier *img_barrier,
                                                   bool new_not_old, VkImageUsageFlags usage_flags, const char *func_name) {
683 bool skip = false;
684 const VkImageLayout layout = (new_not_old) ? img_barrier->newLayout : img_barrier->oldLayout;
685 const char *msg_code = kVUIDUndefined; // sentinel value meaning "no error"
686
687 switch (layout) {
688 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
689 if ((usage_flags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) == 0) {
690 msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01208";
691 }
692 break;
693 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
694 if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) {
695 msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01209";
696 }
697 break;
698 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
699 if ((usage_flags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) == 0) {
700 msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01210";
701 }
702 break;
703 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
704 if ((usage_flags & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) == 0) {
705 msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01211";
706 }
707 break;
708 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
709 if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) == 0) {
710 msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01212";
711 }
712 break;
713 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
714 if ((usage_flags & VK_IMAGE_USAGE_TRANSFER_DST_BIT) == 0) {
715 msg_code = "VUID-VkImageMemoryBarrier-oldLayout-01213";
716 }
717 break;
718 case VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV:
719 if ((usage_flags & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV) == 0) {
720 msg_code = "VUID-VkImageMemoryBarrier-oldLayout-02088";
721 }
722 break;
723 default:
724 // Other VkImageLayout values do not have VUs defined in this context.
725 break;
726 }
727
728 if (msg_code != kVUIDUndefined) {
729 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
730 HandleToUint64(img_barrier->image), msg_code,
731 "%s: Image barrier 0x%p %sLayout=%s is not compatible with image %s usage flags 0x%" PRIx32 ".", func_name,
732 static_cast<const void *>(img_barrier), ((new_not_old) ? "new" : "old"), string_VkImageLayout(layout),
733 report_data->FormatHandle(img_barrier->image).c_str(), usage_flags);
734 }
735 return skip;
736 }
737
738 // Scoreboard for checking for duplicate and inconsistent barriers to images
739 struct ImageBarrierScoreboardEntry {
740 uint32_t index;
741 // This is designed for temporary storage within the scope of the API call. If retained storage of the barriers is
    // required, copies should be made and smart or unique pointers used in some other structure (or this one refactored)
743 const VkImageMemoryBarrier *barrier;
744 };
745 using ImageBarrierScoreboardSubresMap = std::unordered_map<VkImageSubresourceRange, ImageBarrierScoreboardEntry>;
746 using ImageBarrierScoreboardImageMap = std::unordered_map<VkImage, ImageBarrierScoreboardSubresMap>;
747
748 // Verify image barriers are compatible with the images they reference.
bool CoreChecks::ValidateBarriersToImages(layer_data *device_data, GLOBAL_CB_NODE const *cb_state, uint32_t imageMemoryBarrierCount,
                                          const VkImageMemoryBarrier *pImageMemoryBarriers, const char *func_name) {
751 bool skip = false;
752
753 // Scoreboard for duplicate layout transition barriers within the list
754 // Pointers retained in the scoreboard only have the lifetime of *this* call (i.e. within the scope of the API call)
755 ImageBarrierScoreboardImageMap layout_transitions;
756
757 for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
758 auto img_barrier = &pImageMemoryBarriers[i];
759 if (!img_barrier) continue;
760
761 // Update the scoreboard of layout transitions and check for barriers affecting the same image and subresource
762 // TODO: a higher precision could be gained by adapting the command_buffer image_layout_map logic looking for conflicts
763 // at a per sub-resource level
764 if (img_barrier->oldLayout != img_barrier->newLayout) {
765 ImageBarrierScoreboardEntry new_entry{i, img_barrier};
766 auto image_it = layout_transitions.find(img_barrier->image);
767 if (image_it != layout_transitions.end()) {
768 auto &subres_map = image_it->second;
769 auto subres_it = subres_map.find(img_barrier->subresourceRange);
770 if (subres_it != subres_map.end()) {
771 auto &entry = subres_it->second;
772 if ((entry.barrier->newLayout != img_barrier->oldLayout) &&
773 (img_barrier->oldLayout != VK_IMAGE_LAYOUT_UNDEFINED)) {
774 const VkImageSubresourceRange &range = img_barrier->subresourceRange;
775 skip = log_msg(
776 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
777 HandleToUint64(cb_state->commandBuffer), "VUID-VkImageMemoryBarrier-oldLayout-01197",
778 "%s: pImageMemoryBarrier[%u] conflicts with earlier entry pImageMemoryBarrier[%u]. Image %s"
779 " subresourceRange: aspectMask=%u baseMipLevel=%u levelCount=%u, baseArrayLayer=%u, layerCount=%u; "
780 "conflicting barrier transitions image layout from %s when earlier barrier transitioned to layout %s.",
781 func_name, i, entry.index, report_data->FormatHandle(img_barrier->image).c_str(), range.aspectMask,
782 range.baseMipLevel, range.levelCount, range.baseArrayLayer, range.layerCount,
783 string_VkImageLayout(img_barrier->oldLayout), string_VkImageLayout(entry.barrier->newLayout));
784 }
785 entry = new_entry;
786 } else {
787 subres_map[img_barrier->subresourceRange] = new_entry;
788 }
789 } else {
790 layout_transitions[img_barrier->image][img_barrier->subresourceRange] = new_entry;
791 }
792 }
793
794 auto image_state = GetImageState(img_barrier->image);
795 if (image_state) {
796 VkImageUsageFlags usage_flags = image_state->createInfo.usage;
797 skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, false, usage_flags, func_name);
798 skip |= ValidateBarrierLayoutToImageUsage(device_data, img_barrier, true, usage_flags, func_name);
799
800 // Make sure layout is able to be transitioned, currently only presented shared presentable images are locked
801 if (image_state->layout_locked) {
802 // TODO: Add unique id for error when available
803 skip |= log_msg(
804 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
805 HandleToUint64(img_barrier->image), 0,
806 "Attempting to transition shared presentable image %s"
807 " from layout %s to layout %s, but image has already been presented and cannot have its layout transitioned.",
808 report_data->FormatHandle(img_barrier->image).c_str(), string_VkImageLayout(img_barrier->oldLayout),
809 string_VkImageLayout(img_barrier->newLayout));
810 }
811 }
812
813 VkImageCreateInfo *image_create_info = &(GetImageState(img_barrier->image)->createInfo);
814 // For a Depth/Stencil image both aspects MUST be set
815 if (FormatIsDepthAndStencil(image_create_info->format)) {
816 auto const aspect_mask = img_barrier->subresourceRange.aspectMask;
817 auto const ds_mask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
818 if ((aspect_mask & ds_mask) != (ds_mask)) {
819 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
820 HandleToUint64(img_barrier->image), "VUID-VkImageMemoryBarrier-image-01207",
821 "%s: Image barrier 0x%p references image %s of format %s that must have the depth and stencil "
822 "aspects set, but its aspectMask is 0x%" PRIx32 ".",
823 func_name, static_cast<const void *>(img_barrier),
824 report_data->FormatHandle(img_barrier->image).c_str(), string_VkFormat(image_create_info->format),
825 aspect_mask);
826 }
827 }
828 uint32_t level_count = ResolveRemainingLevels(&img_barrier->subresourceRange, image_create_info->mipLevels);
829 uint32_t layer_count = ResolveRemainingLayers(&img_barrier->subresourceRange, image_create_info->arrayLayers);
830
831 for (uint32_t j = 0; j < level_count; j++) {
832 uint32_t level = img_barrier->subresourceRange.baseMipLevel + j;
833 for (uint32_t k = 0; k < layer_count; k++) {
834 uint32_t layer = img_barrier->subresourceRange.baseArrayLayer + k;
835 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_COLOR_BIT);
836 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_DEPTH_BIT);
837 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_STENCIL_BIT);
838 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer, VK_IMAGE_ASPECT_METADATA_BIT);
839 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
840 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer,
841 VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
842 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer,
843 VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
844 skip |= ValidateImageAspectLayout(device_data, cb_state, img_barrier, level, layer,
845 VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
846 }
847 }
848 }
849 }
850 return skip;
851 }
852
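// Returns true if the barrier releases queue family ownership relative to the command pool's queue family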
bool CoreChecks::IsReleaseOp(GLOBAL_CB_NODE *cb_state, VkImageMemoryBarrier const *barrier) {
854 if (!IsTransferOp(barrier)) return false;
855
856 auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
857 return pool && TempIsReleaseOp<VkImageMemoryBarrier, true>(pool, barrier);
858 }
859
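// Warn about queue family ownership (QFO) transfer barriers that duplicate ones already recorded in this command buffer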
860 template <typename Barrier>
bool CoreChecks::ValidateQFOTransferBarrierUniqueness(layer_data *device_data, const char *func_name, GLOBAL_CB_NODE *cb_state,
                                                      uint32_t barrier_count, const Barrier *barriers) {
863 using BarrierRecord = QFOTransferBarrier<Barrier>;
864 bool skip = false;
865 auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
866 auto &barrier_sets = GetQFOBarrierSets(cb_state, typename BarrierRecord::Tag());
867 const char *barrier_name = BarrierRecord::BarrierName();
868 const char *handle_name = BarrierRecord::HandleName();
869 const char *transfer_type = nullptr;
870 for (uint32_t b = 0; b < barrier_count; b++) {
871 if (!IsTransferOp(&barriers[b])) continue;
872 const BarrierRecord *barrier_record = nullptr;
873 if (TempIsReleaseOp<Barrier, true /* Assume IsTransfer */>(pool, &barriers[b]) &&
874 !IsSpecial(barriers[b].dstQueueFamilyIndex)) {
875 const auto found = barrier_sets.release.find(barriers[b]);
876 if (found != barrier_sets.release.cend()) {
877 barrier_record = &(*found);
878 transfer_type = "releasing";
879 }
880 } else if (IsAcquireOp<Barrier, true /*Assume IsTransfer */>(pool, &barriers[b]) &&
881 !IsSpecial(barriers[b].srcQueueFamilyIndex)) {
882 const auto found = barrier_sets.acquire.find(barriers[b]);
883 if (found != barrier_sets.acquire.cend()) {
884 barrier_record = &(*found);
885 transfer_type = "acquiring";
886 }
887 }
888 if (barrier_record != nullptr) {
889 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
890 HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgDuplicateQFOInCB(),
891 "%s: %s at index %" PRIu32 " %s queue ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32
892 " to dstQueueFamilyIndex %" PRIu32 " duplicates existing barrier recorded in this command buffer.",
893 func_name, barrier_name, b, transfer_type, handle_name,
894 report_data->FormatHandle(barrier_record->handle).c_str(), barrier_record->srcQueueFamilyIndex,
895 barrier_record->dstQueueFamilyIndex);
896 }
897 }
898 return skip;
899 }
900
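// Record QFO transfer barriers into this command buffer's pending acquire and release sets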
901 template <typename Barrier>
void CoreChecks::RecordQFOTransferBarriers(layer_data *device_data, GLOBAL_CB_NODE *cb_state, uint32_t barrier_count,
                                           const Barrier *barriers) {
904 auto pool = GetCommandPoolNode(cb_state->createInfo.commandPool);
905 auto &barrier_sets = GetQFOBarrierSets(cb_state, typename QFOTransferBarrier<Barrier>::Tag());
906 for (uint32_t b = 0; b < barrier_count; b++) {
907 if (!IsTransferOp(&barriers[b])) continue;
908 if (TempIsReleaseOp<Barrier, true /* Assume IsTransfer*/>(pool, &barriers[b]) &&
909 !IsSpecial(barriers[b].dstQueueFamilyIndex)) {
910 barrier_sets.release.emplace(barriers[b]);
911 } else if (IsAcquireOp<Barrier, true /*Assume IsTransfer */>(pool, &barriers[b]) &&
912 !IsSpecial(barriers[b].srcQueueFamilyIndex)) {
913 barrier_sets.acquire.emplace(barriers[b]);
914 }
915 }
916 }
917
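// Check both buffer and image memory barriers for duplicate QFO transfers within this command buffer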
bool CoreChecks::ValidateBarriersQFOTransferUniqueness(layer_data *device_data, const char *func_name, GLOBAL_CB_NODE *cb_state,
                                                       uint32_t bufferBarrierCount, const VkBufferMemoryBarrier *pBufferMemBarriers,
                                                       uint32_t imageMemBarrierCount,
                                                       const VkImageMemoryBarrier *pImageMemBarriers) {
922 bool skip = false;
923 skip |= ValidateQFOTransferBarrierUniqueness(device_data, func_name, cb_state, bufferBarrierCount, pBufferMemBarriers);
924 skip |= ValidateQFOTransferBarrierUniqueness(device_data, func_name, cb_state, imageMemBarrierCount, pImageMemBarriers);
925 return skip;
926 }
927
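// Record both buffer and image QFO transfer barriers for this command buffer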
void CoreChecks::RecordBarriersQFOTransfers(layer_data *device_data, GLOBAL_CB_NODE *cb_state, uint32_t bufferBarrierCount,
                                            const VkBufferMemoryBarrier *pBufferMemBarriers, uint32_t imageMemBarrierCount,
                                            const VkImageMemoryBarrier *pImageMemBarriers) {
931 RecordQFOTransferBarriers(device_data, cb_state, bufferBarrierCount, pBufferMemBarriers);
932 RecordQFOTransferBarriers(device_data, cb_state, imageMemBarrierCount, pImageMemBarriers);
933 }
934
935 template <typename BarrierRecord, typename Scoreboard>
bool CoreChecks::ValidateAndUpdateQFOScoreboard(const debug_report_data *report_data, const GLOBAL_CB_NODE *cb_state,
                                                const char *operation, const BarrierRecord &barrier, Scoreboard *scoreboard) {
938 // Record to the scoreboard or report that we have a duplication
939 bool skip = false;
940 auto inserted = scoreboard->insert(std::make_pair(barrier, cb_state));
941 if (!inserted.second && inserted.first->second != cb_state) {
        // This is a duplication (but don't report duplicates from the same CB, as we do that at record time)
943 skip = log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
944 HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgDuplicateQFOInSubmit(),
945 "%s: %s %s queue ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32 " to dstQueueFamilyIndex %" PRIu32
946 " duplicates existing barrier submitted in this batch from command buffer %s.",
947 "vkQueueSubmit()", BarrierRecord::BarrierName(), operation, BarrierRecord::HandleName(),
948 report_data->FormatHandle(barrier.handle).c_str(), barrier.srcQueueFamilyIndex, barrier.dstQueueFamilyIndex,
949 report_data->FormatHandle(inserted.first->second).c_str());
950 }
951 return skip;
952 }
953
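// At queue submit time, validate a command buffer's QFO transfer barriers against the global pending releases and the per-submit scoreboards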
954 template <typename Barrier>
bool CoreChecks::ValidateQueuedQFOTransferBarriers(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
                                                   QFOTransferCBScoreboards<Barrier> *scoreboards) {
957 using BarrierRecord = QFOTransferBarrier<Barrier>;
958 using TypeTag = typename BarrierRecord::Tag;
959 bool skip = false;
960 const auto &cb_barriers = GetQFOBarrierSets(cb_state, TypeTag());
961 const GlobalQFOTransferBarrierMap<Barrier> &global_release_barriers = GetGlobalQFOReleaseBarrierMap(TypeTag());
962 const char *barrier_name = BarrierRecord::BarrierName();
963 const char *handle_name = BarrierRecord::HandleName();
964 // No release should have an extant duplicate (WARNING)
965 for (const auto &release : cb_barriers.release) {
966 // Check the global pending release barriers
967 const auto set_it = global_release_barriers.find(release.handle);
968 if (set_it != global_release_barriers.cend()) {
969 const QFOTransferBarrierSet<Barrier> &set_for_handle = set_it->second;
970 const auto found = set_for_handle.find(release);
971 if (found != set_for_handle.cend()) {
972 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
973 HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgDuplicateQFOSubmitted(),
974 "%s: %s releasing queue ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32
975 " to dstQueueFamilyIndex %" PRIu32
976 " duplicates existing barrier queued for execution, without intervening acquire operation.",
977 "vkQueueSubmit()", barrier_name, handle_name, report_data->FormatHandle(found->handle).c_str(),
978 found->srcQueueFamilyIndex, found->dstQueueFamilyIndex);
979 }
980 }
981 skip |= ValidateAndUpdateQFOScoreboard(report_data, cb_state, "releasing", release, &scoreboards->release);
982 }
983 // Each acquire must have a matching release (ERROR)
984 for (const auto &acquire : cb_barriers.acquire) {
985 const auto set_it = global_release_barriers.find(acquire.handle);
986 bool matching_release_found = false;
987 if (set_it != global_release_barriers.cend()) {
988 const QFOTransferBarrierSet<Barrier> &set_for_handle = set_it->second;
989 matching_release_found = set_for_handle.find(acquire) != set_for_handle.cend();
990 }
991 if (!matching_release_found) {
992 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
993 HandleToUint64(cb_state->commandBuffer), BarrierRecord::ErrMsgMissingQFOReleaseInSubmit(),
994 "%s: in submitted command buffer %s acquiring ownership of %s (%s), from srcQueueFamilyIndex %" PRIu32
995 " to dstQueueFamilyIndex %" PRIu32 " has no matching release barrier queued for execution.",
996 "vkQueueSubmit()", barrier_name, handle_name, report_data->FormatHandle(acquire.handle).c_str(),
997 acquire.srcQueueFamilyIndex, acquire.dstQueueFamilyIndex);
998 }
999 skip |= ValidateAndUpdateQFOScoreboard(report_data, cb_state, "acquiring", acquire, &scoreboards->acquire);
1000 }
1001 return skip;
1002 }
1003
ValidateQueuedQFOTransfers(layer_data * device_data,GLOBAL_CB_NODE * cb_state,QFOTransferCBScoreboards<VkImageMemoryBarrier> * qfo_image_scoreboards,QFOTransferCBScoreboards<VkBufferMemoryBarrier> * qfo_buffer_scoreboards)1004 bool CoreChecks::ValidateQueuedQFOTransfers(layer_data *device_data, GLOBAL_CB_NODE *cb_state,
1005 QFOTransferCBScoreboards<VkImageMemoryBarrier> *qfo_image_scoreboards,
1006 QFOTransferCBScoreboards<VkBufferMemoryBarrier> *qfo_buffer_scoreboards) {
1007 bool skip = false;
1008 skip |= ValidateQueuedQFOTransferBarriers<VkImageMemoryBarrier>(device_data, cb_state, qfo_image_scoreboards);
1009 skip |= ValidateQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(device_data, cb_state, qfo_buffer_scoreboards);
1010 return skip;
1011 }
1012
1013 template <typename Barrier>
RecordQueuedQFOTransferBarriers(layer_data * device_data,GLOBAL_CB_NODE * cb_state)1014 void CoreChecks::RecordQueuedQFOTransferBarriers(layer_data *device_data, GLOBAL_CB_NODE *cb_state) {
1015 using BarrierRecord = QFOTransferBarrier<Barrier>;
1016 using TypeTag = typename BarrierRecord::Tag;
1017 const auto &cb_barriers = GetQFOBarrierSets(cb_state, TypeTag());
1018 GlobalQFOTransferBarrierMap<Barrier> &global_release_barriers = GetGlobalQFOReleaseBarrierMap(TypeTag());
1019
1020 // Add release barriers from this submit to the global map
1021 for (const auto &release : cb_barriers.release) {
1022 // the global barrier list is mapped by resource handle to allow cleanup on resource destruction
1023 // NOTE: We're using [] because creation of a Set is a needed side effect for new handles
1024 global_release_barriers[release.handle].insert(release);
1025 }
1026
1027 // Erase acquired barriers from this submit from the global map -- essentially marking releases as consumed
1028 for (const auto &acquire : cb_barriers.acquire) {
1029 // NOTE: We're not using [] because we don't want to create entries for missing releases
1030 auto set_it = global_release_barriers.find(acquire.handle);
1031 if (set_it != global_release_barriers.end()) {
1032 QFOTransferBarrierSet<Barrier> &set_for_handle = set_it->second;
1033 set_for_handle.erase(acquire);
1034 if (set_for_handle.size() == 0) { // Clean up empty sets
1035 global_release_barriers.erase(set_it);
1036 }
1037 }
1038 }
1039 }
1040
RecordQueuedQFOTransfers(layer_data * device_data,GLOBAL_CB_NODE * cb_state)1041 void CoreChecks::RecordQueuedQFOTransfers(layer_data *device_data, GLOBAL_CB_NODE *cb_state) {
1042 RecordQueuedQFOTransferBarriers<VkImageMemoryBarrier>(device_data, cb_state);
1043 RecordQueuedQFOTransferBarriers<VkBufferMemoryBarrier>(device_data, cb_state);
1044 }
1045
1046 // Avoid making the template globally visible by exporting the one instance of it we need.
EraseQFOImageRelaseBarriers(layer_data * device_data,const VkImage & image)1047 void CoreChecks::EraseQFOImageRelaseBarriers(layer_data *device_data, const VkImage &image) {
1048 EraseQFOReleaseBarriers<VkImageMemoryBarrier>(device_data, image);
1049 }
1050
TransitionImageLayouts(layer_data * device_data,GLOBAL_CB_NODE * cb_state,uint32_t memBarrierCount,const VkImageMemoryBarrier * pImgMemBarriers)1051 void CoreChecks::TransitionImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *cb_state, uint32_t memBarrierCount,
1052 const VkImageMemoryBarrier *pImgMemBarriers) {
1053 for (uint32_t i = 0; i < memBarrierCount; ++i) {
1054 auto mem_barrier = &pImgMemBarriers[i];
1055 if (!mem_barrier) continue;
1056
1057 // For ownership transfers, the barrier is specified twice; as a release
1058 // operation on the yielding queue family, and as an acquire operation
1059 // on the acquiring queue family. This barrier may also include a layout
1060 // transition, which occurs 'between' the two operations. For validation
1061 // purposes it doesn't seem important which side performs the layout
1062 // transition, but it must not be performed twice. We'll arbitrarily
1063 // choose to perform it as part of the acquire operation.
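        // Illustrative example (comments only, not validation logic): transferring an image from a dedicated
        // transfer queue family (index 1, releasing) to the graphics queue family (index 0, acquiring) records
        // the same barrier on both queues, e.g.:
        //     VkImageMemoryBarrier b = {};
        //     b.srcQueueFamilyIndex = 1;  b.dstQueueFamilyIndex = 0;
        //     b.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        //     b.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        // Only the command buffer on the acquiring (dstQueueFamilyIndex) side updates the tracked layout below.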
1064 if (IsReleaseOp(cb_state, mem_barrier)) {
1065 continue;
1066 }
1067
1068 VkImageCreateInfo *image_create_info = &(GetImageState(mem_barrier->image)->createInfo);
1069 uint32_t level_count = ResolveRemainingLevels(&mem_barrier->subresourceRange, image_create_info->mipLevels);
1070 uint32_t layer_count = ResolveRemainingLayers(&mem_barrier->subresourceRange, image_create_info->arrayLayers);
1071
1072 // Special case for 3D images with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR flag bit, where <extent.depth> and
1073 // <arrayLayers> can potentially alias. When recording layout for the entire image, pre-emptively record layouts
1074 // for all (potential) layer sub_resources.
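        // e.g., a 3D image created with extent.depth == 32 and this flag, barriered over its single array layer
        // (baseArrayLayer == 0, layerCount == 1), has per-slice layouts recorded for all 32 depth slices.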
1075 if ((0 != (image_create_info->flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) &&
1076 (mem_barrier->subresourceRange.baseArrayLayer == 0) && (layer_count == 1)) {
1077 layer_count = image_create_info->extent.depth; // Treat each depth slice as a layer subresource
1078 }
1079
1080         // For multiplanar formats, VK_IMAGE_ASPECT_COLOR_BIT is equivalent to the union of the individual plane aspects
1081 VkImageAspectFlags aspect_mask = mem_barrier->subresourceRange.aspectMask;
1082 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
1083 if (FormatIsMultiplane(image_create_info->format)) {
1084 if (aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) {
1085 aspect_mask &= ~VK_IMAGE_ASPECT_COLOR_BIT;
1086 aspect_mask |= (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
1087 if (FormatPlaneCount(image_create_info->format) > 2) {
1088 aspect_mask |= VK_IMAGE_ASPECT_PLANE_2_BIT;
1089 }
1090 }
1091 }
1092 }
1093
1094 for (uint32_t j = 0; j < level_count; j++) {
1095 uint32_t level = mem_barrier->subresourceRange.baseMipLevel + j;
1096 for (uint32_t k = 0; k < layer_count; k++) {
1097 uint32_t layer = mem_barrier->subresourceRange.baseArrayLayer + k;
1098 TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
1099 VK_IMAGE_ASPECT_COLOR_BIT);
1100 TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
1101 VK_IMAGE_ASPECT_DEPTH_BIT);
1102 TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
1103 VK_IMAGE_ASPECT_STENCIL_BIT);
1104 TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
1105 VK_IMAGE_ASPECT_METADATA_BIT);
1106 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
1107 TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
1108 VK_IMAGE_ASPECT_PLANE_0_BIT_KHR);
1109 TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
1110 VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
1111 TransitionImageAspectLayout(device_data, cb_state, mem_barrier, level, layer, aspect_mask,
1112 VK_IMAGE_ASPECT_PLANE_2_BIT_KHR);
1113 }
1114 }
1115 }
1116 }
1117 }
1118
VerifyImageLayout(layer_data const * device_data,GLOBAL_CB_NODE const * cb_node,IMAGE_STATE * image_state,VkImageSubresourceLayers subLayers,VkImageLayout explicit_layout,VkImageLayout optimal_layout,const char * caller,const char * layout_invalid_msg_code,const char * layout_mismatch_msg_code,bool * error)1119 bool CoreChecks::VerifyImageLayout(layer_data const *device_data, GLOBAL_CB_NODE const *cb_node, IMAGE_STATE *image_state,
1120 VkImageSubresourceLayers subLayers, VkImageLayout explicit_layout, VkImageLayout optimal_layout,
1121 const char *caller, const char *layout_invalid_msg_code, const char *layout_mismatch_msg_code,
1122 bool *error) {
1123 const auto image = image_state->image;
1124 bool skip = false;
1125
1126 for (uint32_t i = 0; i < subLayers.layerCount; ++i) {
1127 uint32_t layer = i + subLayers.baseArrayLayer;
1128 VkImageSubresource sub = {subLayers.aspectMask, subLayers.mipLevel, layer};
1129 IMAGE_CMD_BUF_LAYOUT_NODE node;
1130 if (FindCmdBufLayout(device_data, cb_node, image, sub, node)) {
1131 if (node.layout != explicit_layout) {
1132 *error = true;
1133 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1134 HandleToUint64(cb_node->commandBuffer), layout_mismatch_msg_code,
1135 "%s: Cannot use image %s (layer=%u mip=%u) with specific layout %s that doesn't match the actual "
1136 "current layout %s.",
1137 caller, report_data->FormatHandle(image).c_str(), layer, subLayers.mipLevel,
1138 string_VkImageLayout(explicit_layout), string_VkImageLayout(node.layout));
1139 }
1140 }
1141 }
1142 // If optimal_layout is not UNDEFINED, check that layout matches optimal for this case
1143 if ((VK_IMAGE_LAYOUT_UNDEFINED != optimal_layout) && (explicit_layout != optimal_layout)) {
1144 if (VK_IMAGE_LAYOUT_GENERAL == explicit_layout) {
1145 if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
1146 // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning.
1147 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
1148 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, HandleToUint64(cb_node->commandBuffer),
1149 kVUID_Core_DrawState_InvalidImageLayout,
1150 "%s: For optimal performance image %s layout should be %s instead of GENERAL.", caller,
1151 report_data->FormatHandle(image).c_str(), string_VkImageLayout(optimal_layout));
1152 }
1153 } else if (GetDeviceExtensions()->vk_khr_shared_presentable_image) {
1154 if (image_state->shared_presentable) {
1155 if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != explicit_layout) {
1156 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1157 layout_invalid_msg_code,
1158 "Layout for shared presentable image is %s but must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.",
1159                                     string_VkImageLayout(explicit_layout));
1160 }
1161 }
1162 } else {
1163 *error = true;
1164 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1165 HandleToUint64(cb_node->commandBuffer), layout_invalid_msg_code,
1166 "%s: Layout for image %s is %s but can only be %s or VK_IMAGE_LAYOUT_GENERAL.", caller,
1167 report_data->FormatHandle(image).c_str(), string_VkImageLayout(explicit_layout),
1168 string_VkImageLayout(optimal_layout));
1169 }
1170 }
1171 return skip;
1172 }
1173
TransitionFinalSubpassLayouts(layer_data * device_data,GLOBAL_CB_NODE * pCB,const VkRenderPassBeginInfo * pRenderPassBegin,FRAMEBUFFER_STATE * framebuffer_state)1174 void CoreChecks::TransitionFinalSubpassLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB,
1175 const VkRenderPassBeginInfo *pRenderPassBegin,
1176 FRAMEBUFFER_STATE *framebuffer_state) {
1177 auto renderPass = GetRenderPassState(pRenderPassBegin->renderPass);
1178 if (!renderPass) return;
1179
1180 const VkRenderPassCreateInfo2KHR *pRenderPassInfo = renderPass->createInfo.ptr();
1181 if (framebuffer_state) {
1182 for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
1183 auto view_state = GetAttachmentImageViewState(framebuffer_state, i);
1184 if (view_state) {
1185 SetImageViewLayout(device_data, pCB, view_state, pRenderPassInfo->pAttachments[i].finalLayout);
1186 }
1187 }
1188 }
1189 }
1190
1191 #ifdef VK_USE_PLATFORM_ANDROID_KHR
1192 // Android-specific validation that uses types defined only with VK_USE_PLATFORM_ANDROID_KHR
1193 // This could also move into a separate core_validation_android.cpp file... ?
1194
1195 //
1196 // AHB-specific validation within non-AHB APIs
1197 //
ValidateCreateImageANDROID(layer_data * device_data,const debug_report_data * report_data,const VkImageCreateInfo * create_info)1198 bool CoreChecks::ValidateCreateImageANDROID(layer_data *device_data, const debug_report_data *report_data,
1199 const VkImageCreateInfo *create_info) {
1200 bool skip = false;
1201
1202 const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
1203 if (ext_fmt_android) {
1204 if (0 != ext_fmt_android->externalFormat) {
1205 if (VK_FORMAT_UNDEFINED != create_info->format) {
1206 skip |=
1207 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1208 "VUID-VkImageCreateInfo-pNext-01974",
1209 "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with non-zero "
1210 "externalFormat, but the VkImageCreateInfo's format is not VK_FORMAT_UNDEFINED.");
1211 }
1212
1213 if (0 != (VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT & create_info->flags)) {
1214 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1215 "VUID-VkImageCreateInfo-pNext-02396",
1216 "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with "
1217 "non-zero externalFormat, but flags include VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT.");
1218 }
1219
1220 if (0 != (~VK_IMAGE_USAGE_SAMPLED_BIT & create_info->usage)) {
1221 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1222 "VUID-VkImageCreateInfo-pNext-02397",
1223 "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with "
1224 "non-zero externalFormat, but usage includes bits other than VK_IMAGE_USAGE_SAMPLED_BIT.");
1225 }
1226
1227 if (VK_IMAGE_TILING_OPTIMAL != create_info->tiling) {
1228 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1229 "VUID-VkImageCreateInfo-pNext-02398",
1230 "vkCreateImage(): VkImageCreateInfo struct has a chained VkExternalFormatANDROID struct with "
1231                                 "non-zero externalFormat, but tiling is not VK_IMAGE_TILING_OPTIMAL.");
1232 }
1233 }
1234
1235 auto ahb_formats = GetAHBExternalFormatsSet();
1236 if ((0 != ext_fmt_android->externalFormat) && (0 == ahb_formats->count(ext_fmt_android->externalFormat))) {
1237 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1238 "VUID-VkExternalFormatANDROID-externalFormat-01894",
1239 "vkCreateImage(): Chained VkExternalFormatANDROID struct contains a non-zero externalFormat which has "
1240 "not been previously retrieved by vkGetAndroidHardwareBufferPropertiesANDROID().");
1241 }
1242 }
1243
1244 if ((nullptr == ext_fmt_android) || (0 == ext_fmt_android->externalFormat)) {
1245 if (VK_FORMAT_UNDEFINED == create_info->format) {
1246 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1247 "VUID-VkImageCreateInfo-pNext-01975",
1248 "vkCreateImage(): VkImageCreateInfo struct's format is VK_FORMAT_UNDEFINED, but either does not have a "
1249 "chained VkExternalFormatANDROID struct or the struct exists but has an externalFormat of 0.");
1250 }
1251 }
1252
1253 const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
1254 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
1255 if (create_info->imageType != VK_IMAGE_TYPE_2D) {
1256 skip |=
1257 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1258 "VUID-VkImageCreateInfo-pNext-02393",
1259 "vkCreateImage(): VkImageCreateInfo struct with imageType %s has chained VkExternalMemoryImageCreateInfo "
1260 "struct with handleType VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID.",
1261 string_VkImageType(create_info->imageType));
1262 }
1263
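        // e.g., a 1024x512 AHB-backed image may only specify mipLevels of 1 or FullMipChainLevels() == 11
        // (1 + floor(log2(1024))).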
1264 if ((create_info->mipLevels != 1) && (create_info->mipLevels != FullMipChainLevels(create_info->extent))) {
1265 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1266 "VUID-VkImageCreateInfo-pNext-02394",
1267 "vkCreateImage(): VkImageCreateInfo struct with chained VkExternalMemoryImageCreateInfo struct of "
1268 "handleType VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID "
1269                             " specifies mipLevels = %" PRIu32 " (full chain mipLevels are %" PRIu32 ").",
1270 create_info->mipLevels, FullMipChainLevels(create_info->extent));
1271 }
1272 }
1273
1274 return skip;
1275 }
1276
RecordCreateImageANDROID(const VkImageCreateInfo * create_info,IMAGE_STATE * is_node)1277 void CoreChecks::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {
1278 const VkExternalMemoryImageCreateInfo *emici = lvl_find_in_chain<VkExternalMemoryImageCreateInfo>(create_info->pNext);
1279 if (emici && (emici->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)) {
1280 is_node->imported_ahb = true;
1281 }
1282 const VkExternalFormatANDROID *ext_fmt_android = lvl_find_in_chain<VkExternalFormatANDROID>(create_info->pNext);
1283 if (ext_fmt_android && (0 != ext_fmt_android->externalFormat)) {
1284 is_node->has_ahb_format = true;
1285 is_node->ahb_format = ext_fmt_android->externalFormat;
1286 }
1287 }
1288
ValidateCreateImageViewANDROID(layer_data * device_data,const VkImageViewCreateInfo * create_info)1289 bool CoreChecks::ValidateCreateImageViewANDROID(layer_data *device_data, const VkImageViewCreateInfo *create_info) {
1290 bool skip = false;
1291 IMAGE_STATE *image_state = GetImageState(create_info->image);
1292
1293 if (image_state->has_ahb_format) {
1294 if (VK_FORMAT_UNDEFINED != create_info->format) {
1295 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1296 HandleToUint64(create_info->image), "VUID-VkImageViewCreateInfo-image-02399",
1297                             "vkCreateImageView(): image was created with a chained VkExternalFormatANDROID struct (non-zero "
1298                             "externalFormat), but the view's format member is %s rather than VK_FORMAT_UNDEFINED.",
1299 string_VkFormat(create_info->format));
1300 }
1301
1302 // Chain must include a compatible ycbcr conversion
1303 bool conv_found = false;
1304 uint64_t external_format = 0;
1305 const VkSamplerYcbcrConversionInfo *ycbcr_conv_info = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(create_info->pNext);
1306 if (ycbcr_conv_info != nullptr) {
1307 VkSamplerYcbcrConversion conv_handle = ycbcr_conv_info->conversion;
1308 auto fmap = GetYcbcrConversionFormatMap();
1309 if (fmap->find(conv_handle) != fmap->end()) {
1310 conv_found = true;
1311 external_format = fmap->at(conv_handle);
1312 }
1313 }
1314 if ((!conv_found) || (external_format != image_state->ahb_format)) {
1315 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1316 HandleToUint64(create_info->image), "VUID-VkImageViewCreateInfo-image-02400",
1317                             "vkCreateImageView(): image was created with a chained VkExternalFormatANDROID struct, but the view's "
1318                             "pNext chain lacks a VkSamplerYcbcrConversionInfo struct with a matching external format.",
1319 }
1320
1321 // Errors in create_info swizzles
1322 if ((create_info->components.r != VK_COMPONENT_SWIZZLE_IDENTITY) ||
1323 (create_info->components.g != VK_COMPONENT_SWIZZLE_IDENTITY) ||
1324 (create_info->components.b != VK_COMPONENT_SWIZZLE_IDENTITY) ||
1325 (create_info->components.a != VK_COMPONENT_SWIZZLE_IDENTITY)) {
1326 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1327 HandleToUint64(create_info->image), "VUID-VkImageViewCreateInfo-image-02401",
1328                             "vkCreateImageView(): image was created with a chained VkExternalFormatANDROID struct, but the view "
1329                             "includes one or more non-identity component swizzles.",
1330 }
1331 }
1332
1333 return skip;
1334 }
1335
ValidateGetImageSubresourceLayoutANDROID(layer_data * device_data,const VkImage image)1336 bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(layer_data *device_data, const VkImage image) {
1337 bool skip = false;
1338
1339 IMAGE_STATE *image_state = GetImageState(image);
1340 if (image_state->imported_ahb && (0 == image_state->GetBoundMemory().size())) {
1341 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
1342 "VUID-vkGetImageSubresourceLayout-image-01895",
1343 "vkGetImageSubresourceLayout(): Attempt to query layout from an image created with "
1344 "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID handleType which has not yet been "
1345 "bound to memory.");
1346 }
1347 return skip;
1348 }
1349
1350 #else
1351
ValidateCreateImageANDROID(layer_data * device_data,const debug_report_data * report_data,const VkImageCreateInfo * create_info)1352 bool CoreChecks::ValidateCreateImageANDROID(layer_data *device_data, const debug_report_data *report_data,
1353 const VkImageCreateInfo *create_info) {
1354 return false;
1355 }
1356
RecordCreateImageANDROID(const VkImageCreateInfo * create_info,IMAGE_STATE * is_node)1357 void CoreChecks::RecordCreateImageANDROID(const VkImageCreateInfo *create_info, IMAGE_STATE *is_node) {}
1358
ValidateCreateImageViewANDROID(layer_data * device_data,const VkImageViewCreateInfo * create_info)1359 bool CoreChecks::ValidateCreateImageViewANDROID(layer_data *device_data, const VkImageViewCreateInfo *create_info) { return false; }
1360
ValidateGetImageSubresourceLayoutANDROID(layer_data * device_data,const VkImage image)1361 bool CoreChecks::ValidateGetImageSubresourceLayoutANDROID(layer_data *device_data, const VkImage image) { return false; }
1362
1363 #endif // VK_USE_PLATFORM_ANDROID_KHR
1364
PreCallValidateCreateImage(VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)1365 bool CoreChecks::PreCallValidateCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
1366 const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
1367 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
1368 bool skip = false;
1369
1370 if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
1371 skip |= ValidateCreateImageANDROID(device_data, report_data, pCreateInfo);
1372 } else { // These checks are omitted or replaced when Android HW Buffer extension is active
1373 if (pCreateInfo->format == VK_FORMAT_UNDEFINED) {
1374 return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1375 "VUID-VkImageCreateInfo-format-00943",
1376 "vkCreateImage(): VkFormat for image must not be VK_FORMAT_UNDEFINED.");
1377 }
1378 }
1379
1380 if ((pCreateInfo->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) && (VK_IMAGE_TYPE_2D != pCreateInfo->imageType)) {
1381 skip |= log_msg(
1382 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1383 "VUID-VkImageCreateInfo-flags-00949",
1384 "vkCreateImage(): Image type must be VK_IMAGE_TYPE_2D when VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT flag bit is set");
1385 }
1386
1387 const VkPhysicalDeviceLimits *device_limits = &(GetPDProperties()->limits);
1388 VkImageUsageFlags attach_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
1389 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
1390 if ((pCreateInfo->usage & attach_flags) && (pCreateInfo->extent.width > device_limits->maxFramebufferWidth)) {
1391 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1392 "VUID-VkImageCreateInfo-usage-00964",
1393 "vkCreateImage(): Image usage flags include a frame buffer attachment bit and image width exceeds device "
1394 "maxFramebufferWidth.");
1395 }
1396
1397 if ((pCreateInfo->usage & attach_flags) && (pCreateInfo->extent.height > device_limits->maxFramebufferHeight)) {
1398 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1399 "VUID-VkImageCreateInfo-usage-00965",
1400 "vkCreateImage(): Image usage flags include a frame buffer attachment bit and image height exceeds device "
1401                         "maxFramebufferHeight.");
1402 }
1403
1404 VkImageFormatProperties format_limits = {};
1405 VkResult res = GetPDImageFormatProperties(pCreateInfo, &format_limits);
1406 if (res == VK_ERROR_FORMAT_NOT_SUPPORTED) {
1407 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
1408 "vkCreateImage(): Format %s is not supported for this combination of parameters.",
1409 string_VkFormat(pCreateInfo->format));
1410 } else {
1411 if (pCreateInfo->mipLevels > format_limits.maxMipLevels) {
1412 const char *format_string = string_VkFormat(pCreateInfo->format);
1413 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1414 "VUID-VkImageCreateInfo-mipLevels-02255",
1415 "vkCreateImage(): Image mip levels=%d exceed image format maxMipLevels=%d for format %s.",
1416 pCreateInfo->mipLevels, format_limits.maxMipLevels, format_string);
1417 }
1418
1419 uint64_t texel_count = (uint64_t)pCreateInfo->extent.width * (uint64_t)pCreateInfo->extent.height *
1420 (uint64_t)pCreateInfo->extent.depth * (uint64_t)pCreateInfo->arrayLayers *
1421 (uint64_t)pCreateInfo->samples;
1422 uint64_t total_size = (uint64_t)std::ceil(FormatTexelSize(pCreateInfo->format) * texel_count);
1423
1424 // Round up to imageGranularity boundary
1425 VkDeviceSize imageGranularity = GetPDProperties()->limits.bufferImageGranularity;
1426 uint64_t ig_mask = imageGranularity - 1;
1427 total_size = (total_size + ig_mask) & ~ig_mask;
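        // e.g., with bufferImageGranularity = 1024, a computed total_size of 1500 bytes rounds up to 2048.
        // (The mask arithmetic above assumes the granularity is a power of two.)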
1428
1429 if (total_size > format_limits.maxResourceSize) {
1430 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
1431 kVUID_Core_Image_InvalidFormatLimitsViolation,
1432                             "vkCreateImage(): resource size = 0x%" PRIxLEAST64
1433                             " exceeds allowable maximum resource size = 0x%" PRIxLEAST64 ".",
1434 total_size, format_limits.maxResourceSize);
1435 }
1436
1437 if (pCreateInfo->arrayLayers > format_limits.maxArrayLayers) {
1438 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
1439 "VUID-VkImageCreateInfo-arrayLayers-02256",
1440 "vkCreateImage(): arrayLayers=%d exceeds allowable maximum supported by format of %d.",
1441 pCreateInfo->arrayLayers, format_limits.maxArrayLayers);
1442 }
1443
1444 if ((pCreateInfo->samples & format_limits.sampleCounts) == 0) {
1445 skip |=
1446 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, 0,
1447 "VUID-VkImageCreateInfo-samples-02258", "vkCreateImage(): samples %s is not supported by format 0x%.8X.",
1448 string_VkSampleCountFlagBits(pCreateInfo->samples), format_limits.sampleCounts);
1449 }
1450 }
1451
1452 if ((pCreateInfo->flags & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures()->core.sparseResidencyAliased)) {
1453 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
1454 "VUID-VkImageCreateInfo-flags-01924",
1455 "vkCreateImage(): the sparseResidencyAliased device feature is disabled: Images cannot be created with the "
1456 "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT set.");
1457 }
1458
1459 if (GetDeviceExtensions()->vk_khr_maintenance2) {
1460 if (pCreateInfo->flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR) {
1461 if (!(FormatIsCompressed_BC(pCreateInfo->format) || FormatIsCompressed_ASTC_LDR(pCreateInfo->format) ||
1462 FormatIsCompressed_ETC2_EAC(pCreateInfo->format))) {
1463 // TODO: Add Maintenance2 VUID
1464 skip |=
1465 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
1466 "vkCreateImage(): If pCreateInfo->flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, "
1467 "format must be block, ETC or ASTC compressed, but is %s",
1468 string_VkFormat(pCreateInfo->format));
1469 }
1470 if (!(pCreateInfo->flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)) {
1471 // TODO: Add Maintenance2 VUID
1472 skip |=
1473 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUIDUndefined,
1474 "vkCreateImage(): If pCreateInfo->flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, "
1475 "flags must also contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT.");
1476 }
1477 }
1478 }
1479
1480 if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT && pCreateInfo->pQueueFamilyIndices) {
1481 skip |=
1482 ValidateQueueFamilies(device_data, pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices,
1483 "vkCreateImage", "pCreateInfo->pQueueFamilyIndices", "VUID-VkImageCreateInfo-sharingMode-01420",
1484 "VUID-VkImageCreateInfo-sharingMode-01420", false);
1485 }
1486
1487 return skip;
1488 }
1489
PostCallRecordCreateImage(VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage,VkResult result)1490 void CoreChecks::PostCallRecordCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
1491 const VkAllocationCallbacks *pAllocator, VkImage *pImage, VkResult result) {
1492 if (VK_SUCCESS != result) return;
1493 IMAGE_LAYOUT_NODE image_state;
1494 image_state.layout = pCreateInfo->initialLayout;
1495 image_state.format = pCreateInfo->format;
1496 IMAGE_STATE *is_node = new IMAGE_STATE(*pImage, pCreateInfo);
1497 if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
1498 RecordCreateImageANDROID(pCreateInfo, is_node);
1499 }
1500 GetImageMap()->insert(std::make_pair(*pImage, std::unique_ptr<IMAGE_STATE>(is_node)));
1501 ImageSubresourcePair subpair{*pImage, false, VkImageSubresource()};
1502 (*GetImageSubresourceMap())[*pImage].push_back(subpair);
1503 (*GetImageLayoutMap())[subpair] = image_state;
1504 }
1505
PreCallValidateDestroyImage(VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)1506 bool CoreChecks::PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
1507 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
1508 IMAGE_STATE *image_state = GetImageState(image);
1509 const VK_OBJECT obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage};
1510 bool skip = false;
1511 if (image_state) {
1512 skip |= ValidateObjectNotInUse(device_data, image_state, obj_struct, "vkDestroyImage", "VUID-vkDestroyImage-image-01000");
1513 }
1514 return skip;
1515 }
1516
PreCallRecordDestroyImage(VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)1517 void CoreChecks::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
1518 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
1519 if (!image) return;
1520 IMAGE_STATE *image_state = GetImageState(image);
1521 VK_OBJECT obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage};
1522 InvalidateCommandBuffers(device_data, image_state->cb_bindings, obj_struct);
1523 // Clean up memory mapping, bindings and range references for image
1524 for (auto mem_binding : image_state->GetBoundMemory()) {
1525 auto mem_info = GetMemObjInfo(mem_binding);
1526 if (mem_info) {
1527 RemoveImageMemoryRange(obj_struct.handle, mem_info);
1528 }
1529 }
1530 ClearMemoryObjectBindings(obj_struct.handle, kVulkanObjectTypeImage);
1531 EraseQFOReleaseBarriers<VkImageMemoryBarrier>(device_data, image);
1532 // Remove image from imageMap
1533 GetImageMap()->erase(image);
1534 std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *imageSubresourceMap = GetImageSubresourceMap();
1535
1536 const auto &sub_entry = imageSubresourceMap->find(image);
1537 if (sub_entry != imageSubresourceMap->end()) {
1538 for (const auto &pair : sub_entry->second) {
1539 GetImageLayoutMap()->erase(pair);
1540 }
1541 imageSubresourceMap->erase(sub_entry);
1542 }
1543 }
1544
ValidateImageAttributes(layer_data * device_data,IMAGE_STATE * image_state,VkImageSubresourceRange range)1545 bool CoreChecks::ValidateImageAttributes(layer_data *device_data, IMAGE_STATE *image_state, VkImageSubresourceRange range) {
1546 bool skip = false;
1547
1548 if (range.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
1549 char const str[] = "vkCmdClearColorImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_COLOR_BIT";
1550 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1551 HandleToUint64(image_state->image), kVUID_Core_DrawState_InvalidImageAspect, str);
1552 }
1553
1554 if (FormatIsDepthOrStencil(image_state->createInfo.format)) {
1555 char const str[] = "vkCmdClearColorImage called with depth/stencil image.";
1556 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1557 HandleToUint64(image_state->image), "VUID-vkCmdClearColorImage-image-00007", "%s.", str);
1558 } else if (FormatIsCompressed(image_state->createInfo.format)) {
1559 char const str[] = "vkCmdClearColorImage called with compressed image.";
1560 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1561 HandleToUint64(image_state->image), "VUID-vkCmdClearColorImage-image-00007", "%s.", str);
1562 }
1563
1564 if (!(image_state->createInfo.usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)) {
1565 char const str[] = "vkCmdClearColorImage called with image created without VK_IMAGE_USAGE_TRANSFER_DST_BIT.";
1566 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1567 HandleToUint64(image_state->image), "VUID-vkCmdClearColorImage-image-00002", "%s.", str);
1568 }
1569 return skip;
1570 }
1571
ResolveRemainingLevels(const VkImageSubresourceRange * range,uint32_t mip_levels)1572 uint32_t ResolveRemainingLevels(const VkImageSubresourceRange *range, uint32_t mip_levels) {
1573 // Return correct number of mip levels taking into account VK_REMAINING_MIP_LEVELS
1574 uint32_t mip_level_count = range->levelCount;
1575 if (range->levelCount == VK_REMAINING_MIP_LEVELS) {
1576 mip_level_count = mip_levels - range->baseMipLevel;
1577 }
1578 return mip_level_count;
1579 }
1580
ResolveRemainingLayers(const VkImageSubresourceRange * range,uint32_t layers)1581 uint32_t ResolveRemainingLayers(const VkImageSubresourceRange *range, uint32_t layers) {
1582 // Return correct number of layers taking into account VK_REMAINING_ARRAY_LAYERS
1583 uint32_t array_layer_count = range->layerCount;
1584 if (range->layerCount == VK_REMAINING_ARRAY_LAYERS) {
1585 array_layer_count = layers - range->baseArrayLayer;
1586 }
1587 return array_layer_count;
1588 }
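// e.g., for an image created with mipLevels = 10 and arrayLayers = 6, a range of {baseMipLevel = 3,
// levelCount = VK_REMAINING_MIP_LEVELS, baseArrayLayer = 2, layerCount = VK_REMAINING_ARRAY_LAYERS}
// resolves to 7 levels and 4 layers via the two helpers above.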
1589
VerifyClearImageLayout(layer_data * device_data,GLOBAL_CB_NODE * cb_node,IMAGE_STATE * image_state,VkImageSubresourceRange range,VkImageLayout dest_image_layout,const char * func_name)1590 bool CoreChecks::VerifyClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *image_state,
1591 VkImageSubresourceRange range, VkImageLayout dest_image_layout, const char *func_name) {
1592 bool skip = false;
1593
1594 uint32_t level_count = ResolveRemainingLevels(&range, image_state->createInfo.mipLevels);
1595 uint32_t layer_count = ResolveRemainingLayers(&range, image_state->createInfo.arrayLayers);
1596
1597 if (dest_image_layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
1598 if (dest_image_layout == VK_IMAGE_LAYOUT_GENERAL) {
1599 if (image_state->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
1600 // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning.
1601 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1602 HandleToUint64(image_state->image), kVUID_Core_DrawState_InvalidImageLayout,
1603 "%s: Layout for cleared image should be TRANSFER_DST_OPTIMAL instead of GENERAL.", func_name);
1604 }
1605 } else if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR == dest_image_layout) {
1606 if (!GetDeviceExtensions()->vk_khr_shared_presentable_image) {
1607 // TODO: Add unique error id when available.
1608 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1609 HandleToUint64(image_state->image), 0,
1610 "Must enable VK_KHR_shared_presentable_image extension before creating images with a layout type "
1611 "of VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.");
1612
1613 } else {
1614 if (image_state->shared_presentable) {
1615 skip |= log_msg(
1616 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1617 HandleToUint64(image_state->image), 0,
1618 "Layout for shared presentable cleared image is %s but can only be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR.",
1619 string_VkImageLayout(dest_image_layout));
1620 }
1621 }
1622 } else {
1623 const char *error_code = "VUID-vkCmdClearColorImage-imageLayout-00005";
1624 if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
1625 error_code = "VUID-vkCmdClearDepthStencilImage-imageLayout-00012";
1626 } else {
1627 assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
1628 }
1629 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1630 HandleToUint64(image_state->image), error_code,
1631 "%s: Layout for cleared image is %s but can only be TRANSFER_DST_OPTIMAL or GENERAL.", func_name,
1632 string_VkImageLayout(dest_image_layout));
1633 }
1634 }
1635
1636 for (uint32_t level_index = 0; level_index < level_count; ++level_index) {
1637 uint32_t level = level_index + range.baseMipLevel;
1638 for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
1639 uint32_t layer = layer_index + range.baseArrayLayer;
1640 VkImageSubresource sub = {range.aspectMask, level, layer};
1641 IMAGE_CMD_BUF_LAYOUT_NODE node;
1642 if (FindCmdBufLayout(device_data, cb_node, image_state->image, sub, node)) {
1643 if (node.layout != dest_image_layout) {
1644 const char *error_code = "VUID-vkCmdClearColorImage-imageLayout-00004";
1645 if (strcmp(func_name, "vkCmdClearDepthStencilImage()") == 0) {
1646 error_code = "VUID-vkCmdClearDepthStencilImage-imageLayout-00011";
1647 } else {
1648 assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
1649 }
1650 skip |=
1651 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
1652 error_code, "%s: Cannot clear an image whose layout is %s and doesn't match the current layout %s.",
1653 func_name, string_VkImageLayout(dest_image_layout), string_VkImageLayout(node.layout));
1654 }
1655 }
1656 }
1657 }
1658
1659 return skip;
1660 }
1661
RecordClearImageLayout(layer_data * device_data,GLOBAL_CB_NODE * cb_node,VkImage image,VkImageSubresourceRange range,VkImageLayout dest_image_layout)1662 void CoreChecks::RecordClearImageLayout(layer_data *device_data, GLOBAL_CB_NODE *cb_node, VkImage image,
1663 VkImageSubresourceRange range, VkImageLayout dest_image_layout) {
1664 VkImageCreateInfo *image_create_info = &(GetImageState(image)->createInfo);
1665 uint32_t level_count = ResolveRemainingLevels(&range, image_create_info->mipLevels);
1666 uint32_t layer_count = ResolveRemainingLayers(&range, image_create_info->arrayLayers);
1667
1668 for (uint32_t level_index = 0; level_index < level_count; ++level_index) {
1669 uint32_t level = level_index + range.baseMipLevel;
1670 for (uint32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
1671 uint32_t layer = layer_index + range.baseArrayLayer;
1672 VkImageSubresource sub = {range.aspectMask, level, layer};
1673 IMAGE_CMD_BUF_LAYOUT_NODE node;
1674 if (!FindCmdBufLayout(device_data, cb_node, image, sub, node)) {
1675 SetLayout(device_data, cb_node, image, sub, IMAGE_CMD_BUF_LAYOUT_NODE(dest_image_layout, dest_image_layout));
1676 }
1677 }
1678 }
1679 }
1680
PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer,VkImage image,VkImageLayout imageLayout,const VkClearColorValue * pColor,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)1681 bool CoreChecks::PreCallValidateCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1682 const VkClearColorValue *pColor, uint32_t rangeCount,
1683 const VkImageSubresourceRange *pRanges) {
1684 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
1685
1686 bool skip = false;
1687 // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
1688 auto cb_node = GetCBNode(commandBuffer);
1689 auto image_state = GetImageState(image);
1690 if (cb_node && image_state) {
1691 skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCmdClearColorImage()",
1692 "VUID-vkCmdClearColorImage-image-00003");
1693 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearColorImage()", VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
1694 "VUID-vkCmdClearColorImage-commandBuffer-cmdpool");
1695 skip |= ValidateCmd(device_data, cb_node, CMD_CLEARCOLORIMAGE, "vkCmdClearColorImage()");
1696 if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
1697 skip |= ValidateImageFormatFeatureFlags(device_data, image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
1698 "vkCmdClearColorImage", "VUID-vkCmdClearColorImage-image-01993",
1699 "VUID-vkCmdClearColorImage-image-01993");
1700 }
1701 skip |= InsideRenderPass(device_data, cb_node, "vkCmdClearColorImage()", "VUID-vkCmdClearColorImage-renderpass");
1702 for (uint32_t i = 0; i < rangeCount; ++i) {
1703 std::string param_name = "pRanges[" + std::to_string(i) + "]";
1704 skip |= ValidateCmdClearColorSubresourceRange(device_data, image_state, pRanges[i], param_name.c_str());
1705 skip |= ValidateImageAttributes(device_data, image_state, pRanges[i]);
1706 skip |= VerifyClearImageLayout(device_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearColorImage()");
1707 }
1708 }
1709 return skip;
1710 }
1711
PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer,VkImage image,VkImageLayout imageLayout,const VkClearColorValue * pColor,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)1712 void CoreChecks::PreCallRecordCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1713 const VkClearColorValue *pColor, uint32_t rangeCount,
1714 const VkImageSubresourceRange *pRanges) {
1715 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
1716
1717 auto cb_node = GetCBNode(commandBuffer);
1718 auto image_state = GetImageState(image);
1719 if (cb_node && image_state) {
1720 AddCommandBufferBindingImage(device_data, cb_node, image_state);
1721 for (uint32_t i = 0; i < rangeCount; ++i) {
1722 RecordClearImageLayout(device_data, cb_node, image, pRanges[i], imageLayout);
1723 }
1724 }
1725 }
1726
PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer,VkImage image,VkImageLayout imageLayout,const VkClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)1727 bool CoreChecks::PreCallValidateCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1728 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
1729 const VkImageSubresourceRange *pRanges) {
1730 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
1731 bool skip = false;
1732
1733 // TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
1734 auto cb_node = GetCBNode(commandBuffer);
1735 auto image_state = GetImageState(image);
1736 if (cb_node && image_state) {
1737 skip |= ValidateMemoryIsBoundToImage(device_data, image_state, "vkCmdClearDepthStencilImage()",
1738 "VUID-vkCmdClearDepthStencilImage-image-00010");
1739 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearDepthStencilImage()", VK_QUEUE_GRAPHICS_BIT,
1740 "VUID-vkCmdClearDepthStencilImage-commandBuffer-cmdpool");
1741 skip |= ValidateCmd(device_data, cb_node, CMD_CLEARDEPTHSTENCILIMAGE, "vkCmdClearDepthStencilImage()");
1742 if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
1743 skip |= ValidateImageFormatFeatureFlags(device_data, image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
1744 "vkCmdClearDepthStencilImage", "VUID-vkCmdClearDepthStencilImage-image-01994",
1745 "VUID-vkCmdClearDepthStencilImage-image-01994");
1746 }
1747 skip |=
1748 InsideRenderPass(device_data, cb_node, "vkCmdClearDepthStencilImage()", "VUID-vkCmdClearDepthStencilImage-renderpass");
1749 for (uint32_t i = 0; i < rangeCount; ++i) {
1750 std::string param_name = "pRanges[" + std::to_string(i) + "]";
1751 skip |= ValidateCmdClearDepthSubresourceRange(device_data, image_state, pRanges[i], param_name.c_str());
1752 skip |=
1753 VerifyClearImageLayout(device_data, cb_node, image_state, pRanges[i], imageLayout, "vkCmdClearDepthStencilImage()");
1754 // Image aspect must be depth or stencil or both
1755 VkImageAspectFlags valid_aspects = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
1756 if (((pRanges[i].aspectMask & valid_aspects) == 0) || ((pRanges[i].aspectMask & ~valid_aspects) != 0)) {
1757 char const str[] =
1758 "vkCmdClearDepthStencilImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_DEPTH_BIT "
1759 "and/or VK_IMAGE_ASPECT_STENCIL_BIT";
1760 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1761 HandleToUint64(commandBuffer), kVUID_Core_DrawState_InvalidImageAspect, str);
1762 }
1763 }
1764 if (image_state && !FormatIsDepthOrStencil(image_state->createInfo.format)) {
1765 char const str[] = "vkCmdClearDepthStencilImage called without a depth/stencil image.";
1766 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1767 HandleToUint64(image), "VUID-vkCmdClearDepthStencilImage-image-00014", "%s.", str);
1768 }
1769 if (VK_IMAGE_USAGE_TRANSFER_DST_BIT != (VK_IMAGE_USAGE_TRANSFER_DST_BIT & image_state->createInfo.usage)) {
1770 char const str[] =
1771 "vkCmdClearDepthStencilImage() called with an image that was not created with the VK_IMAGE_USAGE_TRANSFER_DST_BIT "
1772 "set.";
1773 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
1774 HandleToUint64(image), "VUID-vkCmdClearDepthStencilImage-image-00009", "%s.", str);
1775 }
1776 }
1777 return skip;
1778 }
1779
PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer,VkImage image,VkImageLayout imageLayout,const VkClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)1780 void CoreChecks::PreCallRecordCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
1781 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
1782 const VkImageSubresourceRange *pRanges) {
1783 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
1784
1785 auto cb_node = GetCBNode(commandBuffer);
1786 auto image_state = GetImageState(image);
1787 if (cb_node && image_state) {
1788 AddCommandBufferBindingImage(device_data, cb_node, image_state);
1789 for (uint32_t i = 0; i < rangeCount; ++i) {
1790 RecordClearImageLayout(device_data, cb_node, image, pRanges[i], imageLayout);
1791 }
1792 }
1793 }
1794
1795 // Returns true if the half-open ranges [start, start + start_offset) and [end, end + end_offset) overlap
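// e.g., [10, 10+10) and [15, 15+10) intersect, while [10, 10+10) and [20, 20+10) do not (a shared endpoint
// does not count as overlap).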
RangesIntersect(int32_t start,uint32_t start_offset,int32_t end,uint32_t end_offset)1796 static bool RangesIntersect(int32_t start, uint32_t start_offset, int32_t end, uint32_t end_offset) {
1797 bool result = false;
1798 uint32_t intersection_min = std::max(static_cast<uint32_t>(start), static_cast<uint32_t>(end));
1799 uint32_t intersection_max = std::min(static_cast<uint32_t>(start) + start_offset, static_cast<uint32_t>(end) + end_offset);
1800
1801 if (intersection_max > intersection_min) {
1802 result = true;
1803 }
1804 return result;
1805 }
1806
1807 // Returns true if source area of first copy region intersects dest area of second region
1808 // It is assumed that these are copy regions within a single image (otherwise no possibility of collision)
RegionIntersects(const VkImageCopy * rgn0,const VkImageCopy * rgn1,VkImageType type,bool is_multiplane)1809 static bool RegionIntersects(const VkImageCopy *rgn0, const VkImageCopy *rgn1, VkImageType type, bool is_multiplane) {
1810 bool result = false;
1811
1812 // Separate planes within a multiplane image cannot intersect
1813 if (is_multiplane && (rgn0->srcSubresource.aspectMask != rgn1->dstSubresource.aspectMask)) {
1814 return result;
1815 }
1816
1817 if ((rgn0->srcSubresource.mipLevel == rgn1->dstSubresource.mipLevel) &&
1818 (RangesIntersect(rgn0->srcSubresource.baseArrayLayer, rgn0->srcSubresource.layerCount, rgn1->dstSubresource.baseArrayLayer,
1819 rgn1->dstSubresource.layerCount))) {
1820 result = true;
1821 switch (type) {
1822 case VK_IMAGE_TYPE_3D:
1823 result &= RangesIntersect(rgn0->srcOffset.z, rgn0->extent.depth, rgn1->dstOffset.z, rgn1->extent.depth);
1824 // fall through
1825 case VK_IMAGE_TYPE_2D:
1826 result &= RangesIntersect(rgn0->srcOffset.y, rgn0->extent.height, rgn1->dstOffset.y, rgn1->extent.height);
1827 // fall through
1828 case VK_IMAGE_TYPE_1D:
1829 result &= RangesIntersect(rgn0->srcOffset.x, rgn0->extent.width, rgn1->dstOffset.x, rgn1->extent.width);
1830 break;
1831 default:
1832 // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
1833 assert(false);
1834 }
1835 }
1836 return result;
1837 }
1838
1839 // Returns non-zero if offset and extent exceed image extents
1840 static const uint32_t x_bit = 1;
1841 static const uint32_t y_bit = 2;
1842 static const uint32_t z_bit = 4;
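// e.g., a return value of (x_bit | y_bit) == 3 means the region exceeds the image extent in both width and
// height; a return value of 0 means the region fits entirely within the image.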
ExceedsBounds(const VkOffset3D * offset,const VkExtent3D * extent,const VkExtent3D * image_extent)1843 static uint32_t ExceedsBounds(const VkOffset3D *offset, const VkExtent3D *extent, const VkExtent3D *image_extent) {
1844 uint32_t result = 0;
1845 // Extents/depths cannot be negative but checks left in for clarity
1846 if ((offset->z + extent->depth > image_extent->depth) || (offset->z < 0) ||
1847 ((offset->z + static_cast<int32_t>(extent->depth)) < 0)) {
1848 result |= z_bit;
1849 }
1850 if ((offset->y + extent->height > image_extent->height) || (offset->y < 0) ||
1851 ((offset->y + static_cast<int32_t>(extent->height)) < 0)) {
1852 result |= y_bit;
1853 }
1854 if ((offset->x + extent->width > image_extent->width) || (offset->x < 0) ||
1855 ((offset->x + static_cast<int32_t>(extent->width)) < 0)) {
1856 result |= x_bit;
1857 }
1858 return result;
1859 }
1860
1861 // Test if two VkExtent3D structs are equivalent
IsExtentEqual(const VkExtent3D * extent,const VkExtent3D * other_extent)1862 static inline bool IsExtentEqual(const VkExtent3D *extent, const VkExtent3D *other_extent) {
1863 bool result = true;
1864 if ((extent->width != other_extent->width) || (extent->height != other_extent->height) ||
1865 (extent->depth != other_extent->depth)) {
1866 result = false;
1867 }
1868 return result;
1869 }
1870
1871 // For image copies between compressed/uncompressed formats, the extent is provided in source image texels
1872 // Destination image texel extents must be adjusted by block size for the dest validation checks
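// e.g., copying a {64, 64, 1} extent (given in source texels) from a BC1 source (4x4 texel blocks) to an
// uncompressed destination yields an adjusted destination extent of {16, 16, 1}; the reverse direction
// multiplies by the destination block size instead.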
GetAdjustedDestImageExtent(VkFormat src_format,VkFormat dst_format,VkExtent3D extent)1873 VkExtent3D GetAdjustedDestImageExtent(VkFormat src_format, VkFormat dst_format, VkExtent3D extent) {
1874 VkExtent3D adjusted_extent = extent;
1875 if ((FormatIsCompressed(src_format) && (!FormatIsCompressed(dst_format)))) {
1876 VkExtent3D block_size = FormatTexelBlockExtent(src_format);
1877 adjusted_extent.width /= block_size.width;
1878 adjusted_extent.height /= block_size.height;
1879 adjusted_extent.depth /= block_size.depth;
1880 } else if ((!FormatIsCompressed(src_format) && (FormatIsCompressed(dst_format)))) {
1881 VkExtent3D block_size = FormatTexelBlockExtent(dst_format);
1882 adjusted_extent.width *= block_size.width;
1883 adjusted_extent.height *= block_size.height;
1884 adjusted_extent.depth *= block_size.depth;
1885 }
1886 return adjusted_extent;
1887 }
1888
1889 // Returns the effective extent of an image subresource, adjusted for mip level and array depth.
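// e.g., a 2D image of extent {100, 40, 1} with 6 array layers queried at mipLevel 3 yields {12, 5, 6}
// (width and height shifted right by the mip level, depth replaced by arrayLayers); with
// VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV the width would instead be max(2, 1 + ((100 - 1) >> 3)) == 13.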
GetImageSubresourceExtent(const IMAGE_STATE * img,const VkImageSubresourceLayers * subresource)1890 static inline VkExtent3D GetImageSubresourceExtent(const IMAGE_STATE *img, const VkImageSubresourceLayers *subresource) {
1891 const uint32_t mip = subresource->mipLevel;
1892
1893 // Return zero extent if mip level doesn't exist
1894 if (mip >= img->createInfo.mipLevels) {
1895 return VkExtent3D{0, 0, 0};
1896 }
1897
1898 // Don't allow mip adjustment to create 0 dim, but pass along a 0 if that's what subresource specified
1899 VkExtent3D extent = img->createInfo.extent;
1900
1901 // If multi-plane, adjust per-plane extent
1902 if (FormatIsMultiplane(img->createInfo.format)) {
1903 VkExtent2D divisors = FindMultiplaneExtentDivisors(img->createInfo.format, subresource->aspectMask);
1904 extent.width /= divisors.width;
1905 extent.height /= divisors.height;
1906 }
1907
1908 if (img->createInfo.flags & VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV) {
1909 extent.width = (0 == extent.width ? 0 : std::max(2U, 1 + ((extent.width - 1) >> mip)));
1910 extent.height = (0 == extent.height ? 0 : std::max(2U, 1 + ((extent.height - 1) >> mip)));
1911 extent.depth = (0 == extent.depth ? 0 : std::max(2U, 1 + ((extent.depth - 1) >> mip)));
1912 } else {
1913 extent.width = (0 == extent.width ? 0 : std::max(1U, extent.width >> mip));
1914 extent.height = (0 == extent.height ? 0 : std::max(1U, extent.height >> mip));
1915 extent.depth = (0 == extent.depth ? 0 : std::max(1U, extent.depth >> mip));
1916 }
1917
1918 // Image arrays have an effective z extent that isn't diminished by mip level
1919 if (VK_IMAGE_TYPE_3D != img->createInfo.imageType) {
1920 extent.depth = img->createInfo.arrayLayers;
1921 }
1922
1923 return extent;
1924 }
1925
1926 // Test if the extent argument has all dimensions set to 0.
IsExtentAllZeroes(const VkExtent3D * extent)1927 static inline bool IsExtentAllZeroes(const VkExtent3D *extent) {
1928 return ((extent->width == 0) && (extent->height == 0) && (extent->depth == 0));
1929 }
1930
1931 // Test if the extent argument has any dimensions set to 0.
IsExtentSizeZero(const VkExtent3D * extent)1932 static inline bool IsExtentSizeZero(const VkExtent3D *extent) {
1933 return ((extent->width == 0) || (extent->height == 0) || (extent->depth == 0));
1934 }
1935
1936 // Returns the image transfer granularity for a specific image scaled by compressed block size if necessary.
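// e.g., a queue family minImageTransferGranularity of {8, 8, 1} used with a BC-compressed image
// (4x4x1 texel blocks) yields an effective granularity of {32, 32, 1} texels.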
GetScaledItg(layer_data * device_data,const GLOBAL_CB_NODE * cb_node,const IMAGE_STATE * img)1937 VkExtent3D CoreChecks::GetScaledItg(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img) {
1938 // Default to (0, 0, 0) granularity in case we can't find the real granularity for the physical device.
1939 VkExtent3D granularity = {0, 0, 0};
1940 auto pPool = GetCommandPoolNode(cb_node->createInfo.commandPool);
1941 if (pPool) {
1942 granularity = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].minImageTransferGranularity;
1943 if (FormatIsCompressed(img->createInfo.format)) {
1944 auto block_size = FormatTexelBlockExtent(img->createInfo.format);
1945 granularity.width *= block_size.width;
1946 granularity.height *= block_size.height;
1947 }
1948 }
1949 return granularity;
1950 }
1951
1952 // Test elements of a VkExtent3D structure against alignment constraints contained in another VkExtent3D structure
IsExtentAligned(const VkExtent3D * extent,const VkExtent3D * granularity)1953 static inline bool IsExtentAligned(const VkExtent3D *extent, const VkExtent3D *granularity) {
1954 bool valid = true;
1955 if ((SafeModulo(extent->depth, granularity->depth) != 0) || (SafeModulo(extent->width, granularity->width) != 0) ||
1956 (SafeModulo(extent->height, granularity->height) != 0)) {
1957 valid = false;
1958 }
1959 return valid;
1960 }
1961
1962 // Check elements of a VkOffset3D structure against a queue family's Image Transfer Granularity values
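// e.g., with a granularity of {32, 32, 1}, an offset of (64, 96, 0) is accepted while (48, 96, 0) is rejected,
// since 48 is not a multiple of 32.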
CheckItgOffset(layer_data * device_data,const GLOBAL_CB_NODE * cb_node,const VkOffset3D * offset,const VkExtent3D * granularity,const uint32_t i,const char * function,const char * member,const char * vuid)1963 bool CoreChecks::CheckItgOffset(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkOffset3D *offset,
1964 const VkExtent3D *granularity, const uint32_t i, const char *function, const char *member,
1965 const char *vuid) {
1966 bool skip = false;
1967 VkExtent3D offset_extent = {};
1968 offset_extent.width = static_cast<uint32_t>(abs(offset->x));
1969 offset_extent.height = static_cast<uint32_t>(abs(offset->y));
1970 offset_extent.depth = static_cast<uint32_t>(abs(offset->z));
1971 if (IsExtentAllZeroes(granularity)) {
1972 // If the queue family image transfer granularity is (0, 0, 0), then the offset must always be (0, 0, 0)
1973 if (IsExtentAllZeroes(&offset_extent) == false) {
1974 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1975 HandleToUint64(cb_node->commandBuffer), vuid,
1976 "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) must be (x=0, y=0, z=0) when the command buffer's queue family "
1977 "image transfer granularity is (w=0, h=0, d=0).",
1978 function, i, member, offset->x, offset->y, offset->z);
1979 }
1980 } else {
1981 // If the queue family image transfer granularity is not (0, 0, 0), then the offset dimensions must always be even
1982 // integer multiples of the image transfer granularity.
1983 if (IsExtentAligned(&offset_extent, granularity) == false) {
1984 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
1985 HandleToUint64(cb_node->commandBuffer), vuid,
1986 "%s: pRegion[%d].%s (x=%d, y=%d, z=%d) dimensions must be even integer multiples of this command "
1987 "buffer's queue family image transfer granularity (w=%d, h=%d, d=%d).",
1988 function, i, member, offset->x, offset->y, offset->z, granularity->width, granularity->height,
1989 granularity->depth);
1990 }
1991 }
1992 return skip;
1993 }
1994
1995 // Check elements of a VkExtent3D structure against a queue family's Image Transfer Granularity values
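// Informally: with a granularity of (16, 16, 1), a copy extent of (48, 32, 1) is accepted because each
// dimension is a multiple of the granularity, while an extent such as (30, 32, 1) is accepted only when
// offset.x + 30 reaches the subresource's full width (i.e. the copy runs to the edge of the image).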
1996 bool CoreChecks::CheckItgExtent(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const VkExtent3D *extent,
1997 const VkOffset3D *offset, const VkExtent3D *granularity, const VkExtent3D *subresource_extent,
1998 const VkImageType image_type, const uint32_t i, const char *function, const char *member,
1999 const char *vuid) {
2000 bool skip = false;
2001 if (IsExtentAllZeroes(granularity)) {
2002 // If the queue family image transfer granularity is (0, 0, 0), then the extent must always match the image
2003 // subresource extent.
2004 if (IsExtentEqual(extent, subresource_extent) == false) {
2005 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2006 HandleToUint64(cb_node->commandBuffer), vuid,
2007 "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d) "
2008 "when the command buffer's queue family image transfer granularity is (w=0, h=0, d=0).",
2009 function, i, member, extent->width, extent->height, extent->depth, subresource_extent->width,
2010 subresource_extent->height, subresource_extent->depth);
2011 }
2012 } else {
2013 // If the queue family image transfer granularity is not (0, 0, 0), then the extent dimensions must always be even
2014 // integer multiples of the image transfer granularity or the offset + extent dimensions must always match the image
2015 // subresource extent dimensions.
2016 VkExtent3D offset_extent_sum = {};
2017 offset_extent_sum.width = static_cast<uint32_t>(abs(offset->x)) + extent->width;
2018 offset_extent_sum.height = static_cast<uint32_t>(abs(offset->y)) + extent->height;
2019 offset_extent_sum.depth = static_cast<uint32_t>(abs(offset->z)) + extent->depth;
2020 bool x_ok = true;
2021 bool y_ok = true;
2022 bool z_ok = true;
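// The switch intentionally falls through so that higher-dimensioned image types also check the lower
// dimensions: 3D validates depth, height, and width; 2D validates height and width; 1D validates width only.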
2023 switch (image_type) {
2024 case VK_IMAGE_TYPE_3D:
2025 z_ok = ((0 == SafeModulo(extent->depth, granularity->depth)) ||
2026 (subresource_extent->depth == offset_extent_sum.depth));
2027 // fall through
2028 case VK_IMAGE_TYPE_2D:
2029 y_ok = ((0 == SafeModulo(extent->height, granularity->height)) ||
2030 (subresource_extent->height == offset_extent_sum.height));
2031 // fall through
2032 case VK_IMAGE_TYPE_1D:
2033 x_ok = ((0 == SafeModulo(extent->width, granularity->width)) ||
2034 (subresource_extent->width == offset_extent_sum.width));
2035 break;
2036 default:
2037 // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
2038 assert(false);
2039 }
2040 if (!(x_ok && y_ok && z_ok)) {
2041 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2042 HandleToUint64(cb_node->commandBuffer), vuid,
2043 "%s: pRegion[%d].%s (w=%d, h=%d, d=%d) dimensions must be even integer multiples of this command "
2044 "buffer's queue family image transfer granularity (w=%d, h=%d, d=%d) or offset (x=%d, y=%d, z=%d) + "
2045 "extent (w=%d, h=%d, d=%d) must match the image subresource extents (w=%d, h=%d, d=%d).",
2046 function, i, member, extent->width, extent->height, extent->depth, granularity->width,
2047 granularity->height, granularity->depth, offset->x, offset->y, offset->z, extent->width, extent->height,
2048 extent->depth, subresource_extent->width, subresource_extent->height, subresource_extent->depth);
2049 }
2050 }
2051 return skip;
2052 }
2053
2054 bool CoreChecks::ValidateImageMipLevel(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img,
2055 uint32_t mip_level, const uint32_t i, const char *function, const char *member,
2056 const char *vuid) {
2057 bool skip = false;
2058 if (mip_level >= img->createInfo.mipLevels) {
2059 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2060 HandleToUint64(cb_node->commandBuffer), vuid,
2061 "In %s, pRegions[%u].%s.mipLevel is %u, but provided image %s has %u mip levels.", function, i, member,
2062 mip_level, report_data->FormatHandle(img->image).c_str(), img->createInfo.mipLevels);
2063 }
2064 return skip;
2065 }
2066
2067 bool CoreChecks::ValidateImageArrayLayerRange(layer_data *device_data, const GLOBAL_CB_NODE *cb_node, const IMAGE_STATE *img,
2068 const uint32_t base_layer, const uint32_t layer_count, const uint32_t i,
2069 const char *function, const char *member, const char *vuid) {
2070 bool skip = false;
2071 if (base_layer >= img->createInfo.arrayLayers || layer_count > img->createInfo.arrayLayers ||
2072 (base_layer + layer_count) > img->createInfo.arrayLayers) {
2073 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2074 HandleToUint64(cb_node->commandBuffer), vuid,
2075 "In %s, pRegions[%u].%s.baseArrayLayer is %u and .layerCount is "
2076 "%u, but provided image %s has %u array layers.",
2077 function, i, member, base_layer, layer_count, report_data->FormatHandle(img->image).c_str(),
2078 img->createInfo.arrayLayers);
2079 }
2080 return skip;
2081 }
2082
2083 // Check valid usage Image Transfer Granularity requirements for elements of a VkBufferImageCopy structure
2084 bool CoreChecks::ValidateCopyBufferImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
2085 const IMAGE_STATE *img, const VkBufferImageCopy *region,
2086 const uint32_t i, const char *function, const char *vuid) {
2087 bool skip = false;
2088 VkExtent3D granularity = GetScaledItg(device_data, cb_node, img);
2089 skip |= CheckItgOffset(device_data, cb_node, &region->imageOffset, &granularity, i, function, "imageOffset", vuid);
2090 VkExtent3D subresource_extent = GetImageSubresourceExtent(img, &region->imageSubresource);
2091 skip |= CheckItgExtent(device_data, cb_node, &region->imageExtent, &region->imageOffset, &granularity, &subresource_extent,
2092 img->createInfo.imageType, i, function, "imageExtent", vuid);
2093 return skip;
2094 }
2095
2096 // Check valid usage Image Transfer Granularity requirements for elements of a VkImageCopy structure
2097 bool CoreChecks::ValidateCopyImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
2098 const IMAGE_STATE *src_img, const IMAGE_STATE *dst_img,
2099 const VkImageCopy *region, const uint32_t i,
2100 const char *function) {
2101 bool skip = false;
2102 // Source image checks
2103 VkExtent3D granularity = GetScaledItg(device_data, cb_node, src_img);
2104 skip |= CheckItgOffset(device_data, cb_node, &region->srcOffset, &granularity, i, function, "srcOffset",
2105 "VUID-vkCmdCopyImage-srcOffset-01783");
2106 VkExtent3D subresource_extent = GetImageSubresourceExtent(src_img, &region->srcSubresource);
2107 const VkExtent3D extent = region->extent;
2108 skip |= CheckItgExtent(device_data, cb_node, &extent, &region->srcOffset, &granularity, &subresource_extent,
2109 src_img->createInfo.imageType, i, function, "extent", "VUID-vkCmdCopyImage-srcOffset-01783");
2110
2111 // Destination image checks
2112 granularity = GetScaledItg(device_data, cb_node, dst_img);
2113 skip |= CheckItgOffset(device_data, cb_node, &region->dstOffset, &granularity, i, function, "dstOffset",
2114 "VUID-vkCmdCopyImage-dstOffset-01784");
2115 // Adjust dest extent, if necessary
2116 const VkExtent3D dest_effective_extent =
2117 GetAdjustedDestImageExtent(src_img->createInfo.format, dst_img->createInfo.format, extent);
2118 subresource_extent = GetImageSubresourceExtent(dst_img, &region->dstSubresource);
2119 skip |= CheckItgExtent(device_data, cb_node, &dest_effective_extent, &region->dstOffset, &granularity, &subresource_extent,
2120 dst_img->createInfo.imageType, i, function, "extent", "VUID-vkCmdCopyImage-dstOffset-01784");
2121 return skip;
2122 }
2123
2124 // Validate contents of a VkImageCopy struct
2125 bool CoreChecks::ValidateImageCopyData(const layer_data *device_data, const debug_report_data *report_data,
2126 const uint32_t regionCount, const VkImageCopy *ic_regions, const IMAGE_STATE *src_state,
2127 const IMAGE_STATE *dst_state) {
2128 bool skip = false;
2129
2130 for (uint32_t i = 0; i < regionCount; i++) {
2131 const VkImageCopy region = ic_regions[i];
2132
2133 // For comp<->uncomp copies, the copy extent for the dest image must be adjusted
2134 const VkExtent3D src_copy_extent = region.extent;
2135 const VkExtent3D dst_copy_extent =
2136 GetAdjustedDestImageExtent(src_state->createInfo.format, dst_state->createInfo.format, region.extent);
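        // The adjusted dest extent accounts for the texel block size, e.g. (assuming a 4x4-block compressed
        // source copied to an uncompressed dest) a 64x64 source extent corresponds to a 16x16 dest extent.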
2137
2138 bool slice_override = false;
2139 uint32_t depth_slices = 0;
2140
2141 // Special case for copying between a 1D/2D array and a 3D image
2142 // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. Heads up.
2143 if ((VK_IMAGE_TYPE_3D == src_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != dst_state->createInfo.imageType)) {
2144 depth_slices = region.dstSubresource.layerCount; // Slice count from 2D subresource
2145 slice_override = (depth_slices != 1);
2146 } else if ((VK_IMAGE_TYPE_3D == dst_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != src_state->createInfo.imageType)) {
2147 depth_slices = region.srcSubresource.layerCount; // Slice count from 2D subresource
2148 slice_override = (depth_slices != 1);
2149 }
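        // When slice_override is set, the depth dimension used by the extent checks below is taken from the
        // 2D subresource's layerCount rather than from extent.depth.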
2150
2151 // Do all checks on source image
2152 //
2153 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
2154 if ((0 != region.srcOffset.y) || (1 != src_copy_extent.height)) {
2155 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2156 HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-00146",
2157 "vkCmdCopyImage(): pRegion[%d] srcOffset.y is %d and extent.height is %d. For 1D images these must "
2158 "be 0 and 1, respectively.",
2159 i, region.srcOffset.y, src_copy_extent.height);
2160 }
2161 }
2162
2163 // VUID-VkImageCopy-srcImage-01785
2164 if ((src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) && ((0 != region.srcOffset.z) || (1 != src_copy_extent.depth))) {
2165 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2166 HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-01785",
2167 "vkCmdCopyImage(): pRegion[%d] srcOffset.z is %d and extent.depth is %d. For 1D images "
2168 "these must be 0 and 1, respectively.",
2169 i, region.srcOffset.z, src_copy_extent.depth);
2170 }
2171
2172 // VUID-VkImageCopy-srcImage-01787
2173 if ((src_state->createInfo.imageType == VK_IMAGE_TYPE_2D) && (0 != region.srcOffset.z)) {
2174 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2175 HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-01787",
2176 "vkCmdCopyImage(): pRegion[%d] srcOffset.z is %d. For 2D images the z-offset must be 0.", i,
2177 region.srcOffset.z);
2178 }
2179
2180 if (GetDeviceExtensions()->vk_khr_maintenance1) {
2181 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
2182 if ((0 != region.srcSubresource.baseArrayLayer) || (1 != region.srcSubresource.layerCount)) {
2183 skip |=
2184 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2185 HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-00141",
2186 "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and srcSubresource.layerCount "
2187 "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively.",
2188 i, region.srcSubresource.baseArrayLayer, region.srcSubresource.layerCount);
2189 }
2190 }
2191 } else { // Pre maint 1
2192 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
2193 if ((0 != region.srcSubresource.baseArrayLayer) || (1 != region.srcSubresource.layerCount)) {
2194 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2195 HandleToUint64(src_state->image), "VUID-VkImageCopy-srcImage-00141",
2196 "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and "
2197 "srcSubresource.layerCount is %d. For copies with either source or dest of type "
2198 "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively.",
2199 i, region.srcSubresource.baseArrayLayer, region.srcSubresource.layerCount);
2200 }
2201 }
2202 }
2203
2204 // Source checks that apply only to compressed images (or to _422 images if ycbcr enabled)
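        // (For a single-plane _422 format such as VK_FORMAT_G8B8G8R8_422_UNORM, the texel block extent is
        // treated as 2x1x1, so x offsets and widths are effectively required to be even.)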
2205 bool ext_ycbcr = GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion;
2206 if (FormatIsCompressed(src_state->createInfo.format) ||
2207 (ext_ycbcr && FormatIsSinglePlane_422(src_state->createInfo.format))) {
2208 const VkExtent3D block_size = FormatTexelBlockExtent(src_state->createInfo.format);
2209 // image offsets must be multiples of block dimensions
2210 if ((SafeModulo(region.srcOffset.x, block_size.width) != 0) ||
2211 (SafeModulo(region.srcOffset.y, block_size.height) != 0) ||
2212 (SafeModulo(region.srcOffset.z, block_size.depth) != 0)) {
2213 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01727" : "VUID-VkImageCopy-srcOffset-00157";
2214 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2215 HandleToUint64(src_state->image), vuid,
2216 "vkCmdCopyImage(): pRegion[%d] srcOffset (%d, %d) must be multiples of the compressed image's "
2217 "texel width & height (%d, %d).",
2218 i, region.srcOffset.x, region.srcOffset.y, block_size.width, block_size.height);
2219 }
2220
2221 const VkExtent3D mip_extent = GetImageSubresourceExtent(src_state, &(region.srcSubresource));
2222 if ((SafeModulo(src_copy_extent.width, block_size.width) != 0) &&
2223 (src_copy_extent.width + region.srcOffset.x != mip_extent.width)) {
2224 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01728" : "VUID-VkImageCopy-extent-00158";
2225 skip |=
2226 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2227 HandleToUint64(src_state->image), vuid,
2228 "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block "
2229 "width (%d), or when added to srcOffset.x (%d) must equal the image subresource width (%d).",
2230 i, src_copy_extent.width, block_size.width, region.srcOffset.x, mip_extent.width);
2231 }
2232
2233 // Extent height must be a multiple of block height, or extent+offset height must equal subresource height
2234 if ((SafeModulo(src_copy_extent.height, block_size.height) != 0) &&
2235 (src_copy_extent.height + region.srcOffset.y != mip_extent.height)) {
2236 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01729" : "VUID-VkImageCopy-extent-00159";
2237 skip |=
2238 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2239 HandleToUint64(src_state->image), vuid,
2240 "vkCmdCopyImage(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block "
2241 "height (%d), or when added to srcOffset.y (%d) must equal the image subresource height (%d).",
2242 i, src_copy_extent.height, block_size.height, region.srcOffset.y, mip_extent.height);
2243 }
2244
2245 // Extent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth
2246 uint32_t copy_depth = (slice_override ? depth_slices : src_copy_extent.depth);
2247 if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + region.srcOffset.z != mip_extent.depth)) {
2248 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-srcImage-01730" : "VUID-VkImageCopy-extent-00160";
2249 skip |=
2250 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2251 HandleToUint64(src_state->image), vuid,
2252 "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block "
2253 "depth (%d), or when added to srcOffset.z (%d) must equal the image subresource depth (%d).",
2254 i, src_copy_extent.depth, block_size.depth, region.srcOffset.z, mip_extent.depth);
2255 }
2256 } // Compressed
2257
2258 // Do all checks on dest image
2259 //
2260 if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
2261 if ((0 != region.dstOffset.y) || (1 != dst_copy_extent.height)) {
2262 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2263 HandleToUint64(dst_state->image), "VUID-VkImageCopy-dstImage-00152",
2264 "vkCmdCopyImage(): pRegion[%d] dstOffset.y is %d and dst_copy_extent.height is %d. For 1D images "
2265 "these must be 0 and 1, respectively.",
2266 i, region.dstOffset.y, dst_copy_extent.height);
2267 }
2268 }
2269
2270 // VUID-VkImageCopy-dstImage-01786
2271 if ((dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) && ((0 != region.dstOffset.z) || (1 != dst_copy_extent.depth))) {
2272 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2273 HandleToUint64(dst_state->image), "VUID-VkImageCopy-dstImage-01786",
2274 "vkCmdCopyImage(): pRegion[%d] dstOffset.z is %d and extent.depth is %d. For 1D images these must be 0 "
2275 "and 1, respectively.",
2276 i, region.dstOffset.z, dst_copy_extent.depth);
2277 }
2278
2279 // VUID-VkImageCopy-dstImage-01788
2280 if ((dst_state->createInfo.imageType == VK_IMAGE_TYPE_2D) && (0 != region.dstOffset.z)) {
2281 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2282 HandleToUint64(dst_state->image), "VUID-VkImageCopy-dstImage-01788",
2283 "vkCmdCopyImage(): pRegion[%d] dstOffset.z is %d. For 2D images the z-offset must be 0.", i,
2284 region.dstOffset.z);
2285 }
2286
2287 if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
2288 if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
2289 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2290 HandleToUint64(dst_state->image), "VUID-VkImageCopy-srcImage-00141",
2291 "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
2292 "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively.",
2293 i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount);
2294 }
2295 }
2296 // The requirement formerly covered by VU 01199 changed with VK_KHR_maintenance1
2297 if (GetDeviceExtensions()->vk_khr_maintenance1) {
2298 if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
2299 if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
2300 skip |=
2301 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2302 HandleToUint64(dst_state->image), "VUID-VkImageCopy-srcImage-00141",
2303 "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
2304 "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively.",
2305 i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount);
2306 }
2307 }
2308 } else { // Pre maint 1
2309 if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
2310 if ((0 != region.dstSubresource.baseArrayLayer) || (1 != region.dstSubresource.layerCount)) {
2311 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2312 HandleToUint64(dst_state->image), "VUID-VkImageCopy-srcImage-00141",
2313 "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and "
2314 "dstSubresource.layerCount is %d. For copies with either source or dest of type "
2315 "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively.",
2316 i, region.dstSubresource.baseArrayLayer, region.dstSubresource.layerCount);
2317 }
2318 }
2319 }
2320
2321 // Dest checks that apply only to compressed images (or to _422 images if ycbcr enabled)
2322 if (FormatIsCompressed(dst_state->createInfo.format) ||
2323 (ext_ycbcr && FormatIsSinglePlane_422(dst_state->createInfo.format))) {
2324 const VkExtent3D block_size = FormatTexelBlockExtent(dst_state->createInfo.format);
2325
2326 // image offsets must be multiples of block dimensions
2327 if ((SafeModulo(region.dstOffset.x, block_size.width) != 0) ||
2328 (SafeModulo(region.dstOffset.y, block_size.height) != 0) ||
2329 (SafeModulo(region.dstOffset.z, block_size.depth) != 0)) {
2330 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01731" : "VUID-VkImageCopy-dstOffset-00162";
2331 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2332 HandleToUint64(dst_state->image), vuid,
2333 "vkCmdCopyImage(): pRegion[%d] dstOffset (%d, %d) must be multiples of the compressed image's "
2334 "texel width & height (%d, %d).",
2335 i, region.dstOffset.x, region.dstOffset.y, block_size.width, block_size.height);
2336 }
2337
2338 const VkExtent3D mip_extent = GetImageSubresourceExtent(dst_state, &(region.dstSubresource));
2339 if ((SafeModulo(dst_copy_extent.width, block_size.width) != 0) &&
2340 (dst_copy_extent.width + region.dstOffset.x != mip_extent.width)) {
2341 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01732" : "VUID-VkImageCopy-extent-00163";
2342 skip |=
2343 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2344 HandleToUint64(dst_state->image), vuid,
2345 "vkCmdCopyImage(): pRegion[%d] dst_copy_extent width (%d) must be a multiple of the compressed texture "
2346 "block width (%d), or when added to dstOffset.x (%d) must equal the image subresource width (%d).",
2347 i, dst_copy_extent.width, block_size.width, region.dstOffset.x, mip_extent.width);
2348 }
2349
2350 // Extent height must be a multiple of block height, or dst_copy_extent+offset height must equal subresource height
2351 if ((SafeModulo(dst_copy_extent.height, block_size.height) != 0) &&
2352 (dst_copy_extent.height + region.dstOffset.y != mip_extent.height)) {
2353 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01733" : "VUID-VkImageCopy-extent-00164";
2354 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2355 HandleToUint64(dst_state->image), vuid,
2356 "vkCmdCopyImage(): pRegion[%d] dst_copy_extent height (%d) must be a multiple of the compressed "
2357 "texture block height (%d), or when added to dstOffset.y (%d) must equal the image subresource "
2358 "height (%d).",
2359 i, dst_copy_extent.height, block_size.height, region.dstOffset.y, mip_extent.height);
2360 }
2361
2362 // Extent depth must be a multiple of block depth, or dst_copy_extent+offset depth must equal subresource depth
2363 uint32_t copy_depth = (slice_override ? depth_slices : dst_copy_extent.depth);
2364 if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + region.dstOffset.z != mip_extent.depth)) {
2365 const char *vuid = ext_ycbcr ? "VUID-VkImageCopy-dstImage-01734" : "VUID-VkImageCopy-extent-00165";
2366 skip |=
2367 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
2368 HandleToUint64(dst_state->image), vuid,
2369 "vkCmdCopyImage(): pRegion[%d] dst_copy_extent width (%d) must be a multiple of the compressed texture "
2370 "block depth (%d), or when added to dstOffset.z (%d) must equal the image subresource depth (%d).",
2371 i, dst_copy_extent.depth, block_size.depth, region.dstOffset.z, mip_extent.depth);
2372 }
2373 } // Compressed
2374 }
2375 return skip;
2376 }
2377
2378 // vkCmdCopyImage checks that only apply if the multiplane extension is enabled
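// E.g. for a 2-plane source format such as VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, srcSubresource.aspectMask
// must be exactly VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT; 3-plane formats additionally
// allow VK_IMAGE_ASPECT_PLANE_2_BIT.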
2379 bool CoreChecks::CopyImageMultiplaneValidation(const layer_data *dev_data, VkCommandBuffer command_buffer,
2380 const IMAGE_STATE *src_image_state, const IMAGE_STATE *dst_image_state,
2381 const VkImageCopy region) {
2382 bool skip = false;
2383
2384 // Neither image is multiplane
2385 if ((!FormatIsMultiplane(src_image_state->createInfo.format)) && (!FormatIsMultiplane(dst_image_state->createInfo.format))) {
2386 // If neither image is multi-plane the aspectMask member of src and dst must match
2387 if (region.srcSubresource.aspectMask != region.dstSubresource.aspectMask) {
2388 std::stringstream ss;
2389 ss << "vkCmdCopyImage(): Copy between non-multiplane images with differing aspectMasks ( 0x" << std::hex
2390 << region.srcSubresource.aspectMask << " and 0x" << region.dstSubresource.aspectMask << " )";
2391 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2392 HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01551", "%s.", ss.str().c_str());
2393 }
2394 } else {
2395 // Source image multiplane checks
2396 uint32_t planes = FormatPlaneCount(src_image_state->createInfo.format);
2397 VkImageAspectFlags aspect = region.srcSubresource.aspectMask;
2398 if ((2 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR)) {
2399 std::stringstream ss;
2400 ss << "vkCmdCopyImage(): Source image aspect mask (0x" << std::hex << aspect << ") is invalid for 2-plane format";
2401 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2402 HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01552", "%s.", ss.str().c_str());
2403 }
2404 if ((3 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR) &&
2405 (aspect != VK_IMAGE_ASPECT_PLANE_2_BIT_KHR)) {
2406 std::stringstream ss;
2407 ss << "vkCmdCopyImage(): Source image aspect mask (0x" << std::hex << aspect << ") is invalid for 3-plane format";
2408 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2409 HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01553", "%s.", ss.str().c_str());
2410 }
2411 // Single-plane to multi-plane
2412 if ((!FormatIsMultiplane(src_image_state->createInfo.format)) && (FormatIsMultiplane(dst_image_state->createInfo.format)) &&
2413 (VK_IMAGE_ASPECT_COLOR_BIT != aspect)) {
2414 std::stringstream ss;
2415 ss << "vkCmdCopyImage(): Source image aspect mask (0x" << std::hex << aspect << ") is not VK_IMAGE_ASPECT_COLOR_BIT";
2416 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2417 HandleToUint64(command_buffer), "VUID-VkImageCopy-dstImage-01557", "%s.", ss.str().c_str());
2418 }
2419
2420 // Dest image multiplane checks
2421 planes = FormatPlaneCount(dst_image_state->createInfo.format);
2422 aspect = region.dstSubresource.aspectMask;
2423 if ((2 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR)) {
2424 std::stringstream ss;
2425 ss << "vkCmdCopyImage(): Dest image aspect mask (0x" << std::hex << aspect << ") is invalid for 2-plane format";
2426 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2427 HandleToUint64(command_buffer), "VUID-VkImageCopy-dstImage-01554", "%s.", ss.str().c_str());
2428 }
2429 if ((3 == planes) && (aspect != VK_IMAGE_ASPECT_PLANE_0_BIT_KHR) && (aspect != VK_IMAGE_ASPECT_PLANE_1_BIT_KHR) &&
2430 (aspect != VK_IMAGE_ASPECT_PLANE_2_BIT_KHR)) {
2431 std::stringstream ss;
2432 ss << "vkCmdCopyImage(): Dest image aspect mask (0x" << std::hex << aspect << ") is invalid for 3-plane format";
2433 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2434 HandleToUint64(command_buffer), "VUID-VkImageCopy-dstImage-01555", "%s.", ss.str().c_str());
2435 }
2436 // Multi-plane to single-plane
2437 if ((FormatIsMultiplane(src_image_state->createInfo.format)) && (!FormatIsMultiplane(dst_image_state->createInfo.format)) &&
2438 (VK_IMAGE_ASPECT_COLOR_BIT != aspect)) {
2439 std::stringstream ss;
2440 ss << "vkCmdCopyImage(): Dest image aspect mask (0x" << std::hex << aspect << ") is not VK_IMAGE_ASPECT_COLOR_BIT";
2441 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2442 HandleToUint64(command_buffer), "VUID-VkImageCopy-srcImage-01556", "%s.", ss.str().c_str());
2443 }
2444 }
2445
2446 return skip;
2447 }
2448
2449 bool CoreChecks::PreCallValidateCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2450 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2451 const VkImageCopy *pRegions) {
2452 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
2453 auto cb_node = GetCBNode(commandBuffer);
2454 auto src_image_state = GetImageState(srcImage);
2455 auto dst_image_state = GetImageState(dstImage);
2456 bool skip = false;
2457
2458 skip = ValidateImageCopyData(device_data, report_data, regionCount, pRegions, src_image_state, dst_image_state);
2459
2460 VkCommandBuffer command_buffer = cb_node->commandBuffer;
2461
2462 for (uint32_t i = 0; i < regionCount; i++) {
2463 const VkImageCopy region = pRegions[i];
2464
2465 // For comp/uncomp copies, the copy extent for the dest image must be adjusted
2466 VkExtent3D src_copy_extent = region.extent;
2467 VkExtent3D dst_copy_extent =
2468 GetAdjustedDestImageExtent(src_image_state->createInfo.format, dst_image_state->createInfo.format, region.extent);
2469
2470 bool slice_override = false;
2471 uint32_t depth_slices = 0;
2472
2473 // Special case for copying between a 1D/2D array and a 3D image
2474 // TBD: This seems like the only way to reconcile 3 mutually-exclusive VU checks for 2D/3D copies. Heads up.
2475 if ((VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType) &&
2476 (VK_IMAGE_TYPE_3D != dst_image_state->createInfo.imageType)) {
2477 depth_slices = region.dstSubresource.layerCount; // Slice count from 2D subresource
2478 slice_override = (depth_slices != 1);
2479 } else if ((VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType) &&
2480 (VK_IMAGE_TYPE_3D != src_image_state->createInfo.imageType)) {
2481 depth_slices = region.srcSubresource.layerCount; // Slice count from 2D subresource
2482 slice_override = (depth_slices != 1);
2483 }
2484
2485 skip |= ValidateImageSubresourceLayers(device_data, cb_node, &region.srcSubresource, "vkCmdCopyImage", "srcSubresource", i);
2486 skip |= ValidateImageSubresourceLayers(device_data, cb_node, &region.dstSubresource, "vkCmdCopyImage", "dstSubresource", i);
2487 skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, region.srcSubresource.mipLevel, i, "vkCmdCopyImage",
2488 "srcSubresource", "VUID-vkCmdCopyImage-srcSubresource-01696");
2489 skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, region.dstSubresource.mipLevel, i, "vkCmdCopyImage",
2490 "dstSubresource", "VUID-vkCmdCopyImage-dstSubresource-01697");
2491 skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, region.srcSubresource.baseArrayLayer,
2492 region.srcSubresource.layerCount, i, "vkCmdCopyImage", "srcSubresource",
2493 "VUID-vkCmdCopyImage-srcSubresource-01698");
2494 skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, region.dstSubresource.baseArrayLayer,
2495 region.dstSubresource.layerCount, i, "vkCmdCopyImage", "dstSubresource",
2496 "VUID-vkCmdCopyImage-dstSubresource-01699");
2497
2498 if (GetDeviceExtensions()->vk_khr_maintenance1) {
2499 // No chance of mismatch if we're overriding depth slice count
2500 if (!slice_override) {
2501 // The number of depth slices in srcSubresource and dstSubresource must match
2502 // Depth comes from layerCount for 1D,2D resources, from extent.depth for 3D
2503 uint32_t src_slices =
2504 (VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType ? src_copy_extent.depth
2505 : region.srcSubresource.layerCount);
2506 uint32_t dst_slices =
2507 (VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType ? dst_copy_extent.depth
2508 : region.dstSubresource.layerCount);
2509 if (src_slices != dst_slices) {
2510 std::stringstream ss;
2511 ss << "vkCmdCopyImage(): number of depth slices in source and destination subresources for pRegions[" << i
2512 << "] do not match";
2513 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2514 HandleToUint64(command_buffer), "VUID-VkImageCopy-extent-00140", "%s.", ss.str().c_str());
2515 }
2516 }
2517 } else {
2518 // For each region the layerCount member of srcSubresource and dstSubresource must match
2519 if (region.srcSubresource.layerCount != region.dstSubresource.layerCount) {
2520 std::stringstream ss;
2521 ss << "vkCmdCopyImage(): number of layers in source and destination subresources for pRegions[" << i
2522 << "] do not match";
2523 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2524 HandleToUint64(command_buffer), "VUID-VkImageCopy-extent-00140", "%s.", ss.str().c_str());
2525 }
2526 }
2527
2528 // Do multiplane-specific checks, if extension enabled
2529 if (GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
2530 skip |= CopyImageMultiplaneValidation(device_data, command_buffer, src_image_state, dst_image_state, region);
2531 }
2532
2533 if (!GetDeviceExtensions()->vk_khr_sampler_ycbcr_conversion) {
2534 // If the formats are not multi-plane, the aspectMask member of srcSubresource and dstSubresource must match
2535 if (region.srcSubresource.aspectMask != region.dstSubresource.aspectMask) {
2536 char const str[] = "vkCmdCopyImage(): Src and dest aspectMasks for each region must match";
2537 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2538 HandleToUint64(command_buffer), "VUID-VkImageCopy-aspectMask-00137", "%s.", str);
2539 }
2540 }
2541
2542 // For each region, the aspectMask member of srcSubresource must be present in the source image
2543 if (!VerifyAspectsPresent(region.srcSubresource.aspectMask, src_image_state->createInfo.format)) {
2544 std::stringstream ss;
2545 ss << "vkCmdCopyImage(): pRegion[" << i
2546 << "] srcSubresource.aspectMask cannot specify aspects not present in source image";
2547 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2548 HandleToUint64(command_buffer), "VUID-VkImageCopy-aspectMask-00142", "%s.", ss.str().c_str());
2549 }
2550
2551 // For each region, the aspectMask member of dstSubresource must be present in the destination image
2552 if (!VerifyAspectsPresent(region.dstSubresource.aspectMask, dst_image_state->createInfo.format)) {
2553 std::stringstream ss;
2554 ss << "vkCmdCopyImage(): pRegion[" << i
2555 << "] dstSubresource.aspectMask cannot specify aspects not present in dest image";
2556 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2557 HandleToUint64(command_buffer), "VUID-VkImageCopy-aspectMask-00143", "%s.", ss.str().c_str());
2558 }
2559
2560 // Check region extents for 1D-1D, 2D-2D, and 3D-3D copies
2561 if (src_image_state->createInfo.imageType == dst_image_state->createInfo.imageType) {
2562 // The source region specified by a given element of regions must be a region that is contained within srcImage
2563 VkExtent3D img_extent = GetImageSubresourceExtent(src_image_state, &(region.srcSubresource));
2564 if (0 != ExceedsBounds(&region.srcOffset, &src_copy_extent, &img_extent)) {
2565 std::stringstream ss;
2566 ss << "vkCmdCopyImage(): Source pRegion[" << i << "] with mipLevel [ " << region.srcSubresource.mipLevel
2567 << " ], offset [ " << region.srcOffset.x << ", " << region.srcOffset.y << ", " << region.srcOffset.z
2568 << " ], extent [ " << src_copy_extent.width << ", " << src_copy_extent.height << ", " << src_copy_extent.depth
2569 << " ] exceeds the source image dimensions";
2570 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2571 HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-pRegions-00122", "%s.", ss.str().c_str());
2572 }
2573
2574 // The destination region specified by a given element of regions must be a region that is contained within dst_image
2575 img_extent = GetImageSubresourceExtent(dst_image_state, &(region.dstSubresource));
2576 if (0 != ExceedsBounds(&region.dstOffset, &dst_copy_extent, &img_extent)) {
2577 std::stringstream ss;
2578 ss << "vkCmdCopyImage(): Dest pRegion[" << i << "] with mipLevel [ " << region.dstSubresource.mipLevel
2579 << " ], offset [ " << region.dstOffset.x << ", " << region.dstOffset.y << ", " << region.dstOffset.z
2580 << " ], extent [ " << dst_copy_extent.width << ", " << dst_copy_extent.height << ", " << dst_copy_extent.depth
2581 << " ] exceeds the destination image dimensions";
2582 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2583 HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-pRegions-00123", "%s.", ss.str().c_str());
2584 }
2585 }
2586
2587 // Each dimension's offset + extent must fall within the image subresource extent
2588 VkExtent3D subresource_extent = GetImageSubresourceExtent(src_image_state, &(region.srcSubresource));
2589 if (slice_override) src_copy_extent.depth = depth_slices;
2590 uint32_t extent_check = ExceedsBounds(&(region.srcOffset), &src_copy_extent, &subresource_extent);
2591 if (extent_check & x_bit) {
2592 skip |=
2593 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2594 HandleToUint64(command_buffer), "VUID-VkImageCopy-srcOffset-00144",
2595 "vkCmdCopyImage(): Source image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource "
2596 "width [%1d].",
2597 i, region.srcOffset.x, src_copy_extent.width, subresource_extent.width);
2598 }
2599
2600 if (extent_check & y_bit) {
2601 skip |=
2602 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2603 HandleToUint64(command_buffer), "VUID-VkImageCopy-srcOffset-00145",
2604 "vkCmdCopyImage(): Source image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource "
2605 "height [%1d].",
2606 i, region.srcOffset.y, src_copy_extent.height, subresource_extent.height);
2607 }
2608 if (extent_check & z_bit) {
2609 skip |=
2610 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2611 HandleToUint64(command_buffer), "VUID-VkImageCopy-srcOffset-00147",
2612 "vkCmdCopyImage(): Source image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
2613 "depth [%1d].",
2614 i, region.srcOffset.z, src_copy_extent.depth, subresource_extent.depth);
2615 }
2616
2617 // Adjust dest extent if necessary
2618 subresource_extent = GetImageSubresourceExtent(dst_image_state, &(region.dstSubresource));
2619 if (slice_override) dst_copy_extent.depth = depth_slices;
2620
2621 extent_check = ExceedsBounds(&(region.dstOffset), &dst_copy_extent, &subresource_extent);
2622 if (extent_check & x_bit) {
2623 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2624 HandleToUint64(command_buffer), "VUID-VkImageCopy-dstOffset-00150",
2625 "vkCmdCopyImage(): Dest image pRegion %1d x-dimension offset [%1d] + extent [%1d] exceeds subResource "
2626 "width [%1d].",
2627 i, region.dstOffset.x, dst_copy_extent.width, subresource_extent.width);
2628 }
2629 if (extent_check & y_bit) {
2630 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2631 HandleToUint64(command_buffer), "VUID-VkImageCopy-dstOffset-00151",
2632 "vkCmdCopyImage(): Dest image pRegion %1d y-dimension offset [%1d] + extent [%1d] exceeds subResource "
2633 "height [%1d].",
2634 i, region.dstOffset.y, dst_copy_extent.height, subresource_extent.height);
2635 }
2636 if (extent_check & z_bit) {
2637 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2638 HandleToUint64(command_buffer), "VUID-VkImageCopy-dstOffset-00153",
2639 "vkCmdCopyImage(): Dest image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
2640 "depth [%1d].",
2641 i, region.dstOffset.z, dst_copy_extent.depth, subresource_extent.depth);
2642 }
2643
2644 // The union of all source regions, and the union of all destination regions, specified by the elements of regions,
2645 // must not overlap in memory
2646 if (src_image_state->image == dst_image_state->image) {
2647 for (uint32_t j = 0; j < regionCount; j++) {
2648 if (RegionIntersects(&region, &pRegions[j], src_image_state->createInfo.imageType,
2649 FormatIsMultiplane(src_image_state->createInfo.format))) {
2650 std::stringstream ss;
2651 ss << "vkCmdCopyImage(): pRegions[" << i << "] src overlaps with pRegions[" << j << "].";
2652 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2653 HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-pRegions-00124", "%s.", ss.str().c_str());
2654 }
2655 }
2656 }
2657 }
2658
2659 // The formats of src_image and dst_image must be compatible. Formats are considered compatible if their texel size in bytes
2660 // is the same between both formats. For example, VK_FORMAT_R8G8B8A8_UNORM is compatible with VK_FORMAT_R32_UINT because
2661 // both texels are 4 bytes in size. Depth/stencil formats must match exactly.
2662 if (FormatIsDepthOrStencil(src_image_state->createInfo.format) || FormatIsDepthOrStencil(dst_image_state->createInfo.format)) {
2663 if (src_image_state->createInfo.format != dst_image_state->createInfo.format) {
2664 char const str[] = "vkCmdCopyImage called with unmatched source and dest image depth/stencil formats.";
2665 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2666 HandleToUint64(command_buffer), kVUID_Core_DrawState_MismatchedImageFormat, str);
2667 }
2668 } else {
2669 if (!FormatSizesAreEqual(src_image_state->createInfo.format, dst_image_state->createInfo.format, regionCount, pRegions)) {
2670 char const str[] = "vkCmdCopyImage called with unmatched source and dest image format sizes.";
2671 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2672 HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-srcImage-00135", "%s.", str);
2673 }
2674 }
2675
2676 // Source and dest image sample counts must match
2677 if (src_image_state->createInfo.samples != dst_image_state->createInfo.samples) {
2678 char const str[] = "vkCmdCopyImage() called on image pair with non-identical sample counts.";
2679 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2680 HandleToUint64(command_buffer), "VUID-vkCmdCopyImage-srcImage-00136", "%s", str);
2681 }
2682
2683 skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-srcImage-00127");
2684 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-dstImage-00132");
2685 // Validate that SRC & DST images have correct usage flags set
2686 skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
2687 "VUID-vkCmdCopyImage-srcImage-00126", "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
2688 skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
2689 "VUID-vkCmdCopyImage-dstImage-00131", "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
2690 if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
2691 skip |=
2692 ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, "vkCmdCopyImage()",
2693 "VUID-vkCmdCopyImage-srcImage-01995", "VUID-vkCmdCopyImage-srcImage-01995");
2694 skip |=
2695 ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT, "vkCmdCopyImage()",
2696 "VUID-vkCmdCopyImage-dstImage-01996", "VUID-vkCmdCopyImage-dstImage-01996");
2697 }
2698 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyImage()",
2699 VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
2700 "VUID-vkCmdCopyImage-commandBuffer-cmdpool");
2701 skip |= ValidateCmd(device_data, cb_node, CMD_COPYIMAGE, "vkCmdCopyImage()");
2702 skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyImage()", "VUID-vkCmdCopyImage-renderpass");
2703 bool hit_error = false;
2704 const char *invalid_src_layout_vuid =
2705 (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
2706 ? "VUID-vkCmdCopyImage-srcImageLayout-01917"
2707 : "VUID-vkCmdCopyImage-srcImageLayout-00129";
2708 const char *invalid_dst_layout_vuid =
2709 (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
2710 ? "VUID-vkCmdCopyImage-dstImageLayout-01395"
2711 : "VUID-vkCmdCopyImage-dstImageLayout-00134";
2712 for (uint32_t i = 0; i < regionCount; ++i) {
2713 skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
2714 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImage()", invalid_src_layout_vuid,
2715 "VUID-vkCmdCopyImage-srcImageLayout-00128", &hit_error);
2716 skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
2717 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyImage()", invalid_dst_layout_vuid,
2718 "VUID-vkCmdCopyImage-dstImageLayout-00133", &hit_error);
2719 skip |= ValidateCopyImageTransferGranularityRequirements(device_data, cb_node, src_image_state, dst_image_state,
2720 &pRegions[i], i, "vkCmdCopyImage()");
2721 }
2722
2723 return skip;
2724 }
2725
2726 void CoreChecks::PreCallRecordCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2727 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2728 const VkImageCopy *pRegions) {
2729 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
2730 auto cb_node = GetCBNode(commandBuffer);
2731 auto src_image_state = GetImageState(srcImage);
2732 auto dst_image_state = GetImageState(dstImage);
2733
2734 // Make sure that all image slices are updated to correct layout
2735 for (uint32_t i = 0; i < regionCount; ++i) {
2736 SetImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout);
2737 SetImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
2738 }
2739 // Update bindings between images and cmd buffer
2740 AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
2741 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
2742 }
2743
2744 // Returns true if sub_rect is entirely contained within rect
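// (Used below to verify that each VkClearRect lies within the active render area.)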
2745 static inline bool ContainsRect(VkRect2D rect, VkRect2D sub_rect) {
2746 if ((sub_rect.offset.x < rect.offset.x) || (sub_rect.offset.x + sub_rect.extent.width > rect.offset.x + rect.extent.width) ||
2747 (sub_rect.offset.y < rect.offset.y) || (sub_rect.offset.y + sub_rect.extent.height > rect.offset.y + rect.extent.height))
2748 return false;
2749 return true;
2750 }
2751
2752 bool CoreChecks::ValidateClearAttachmentExtent(layer_data *device_data, VkCommandBuffer command_buffer, uint32_t attachment_index,
2753 FRAMEBUFFER_STATE *framebuffer, uint32_t fb_attachment, const VkRect2D &render_area,
2754 uint32_t rect_count, const VkClearRect *clear_rects) {
2755 bool skip = false;
2756 const IMAGE_VIEW_STATE *image_view_state = nullptr;
2757 if (framebuffer && (fb_attachment != VK_ATTACHMENT_UNUSED) && (fb_attachment < framebuffer->createInfo.attachmentCount)) {
2758 image_view_state = GetImageViewState(framebuffer->createInfo.pAttachments[fb_attachment]);
2759 }
2760
2761 for (uint32_t j = 0; j < rect_count; j++) {
2762 if (!ContainsRect(render_area, clear_rects[j].rect)) {
2763 skip |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2764 HandleToUint64(command_buffer), "VUID-vkCmdClearAttachments-pRects-00016",
2765 "vkCmdClearAttachments(): The area defined by pRects[%d] is not contained in the area of "
2766 "the current render pass instance.",
2767 j);
2768 }
2769
2770 if (image_view_state) {
2771 // The layers specified by a given element of pRects must be contained within every attachment that
2772 // pAttachments refers to
2773 const auto attachment_layer_count = image_view_state->create_info.subresourceRange.layerCount;
2774 if ((clear_rects[j].baseArrayLayer >= attachment_layer_count) ||
2775 (clear_rects[j].baseArrayLayer + clear_rects[j].layerCount > attachment_layer_count)) {
2776 skip |=
2777 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2778 HandleToUint64(command_buffer), "VUID-vkCmdClearAttachments-pRects-00017",
2779 "vkCmdClearAttachments(): The layers defined in pRects[%d] are not contained in the layers "
2780 "of pAttachment[%d].",
2781 j, attachment_index);
2782 }
2783 }
2784 }
2785 return skip;
2786 }
2787
2788 bool CoreChecks::PreCallValidateCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2789 const VkClearAttachment *pAttachments, uint32_t rectCount,
2790 const VkClearRect *pRects) {
2791 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
2792
2793 GLOBAL_CB_NODE *cb_node = GetCBNode(commandBuffer);
2794
2795 bool skip = false;
2796 if (cb_node) {
2797 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdClearAttachments()", VK_QUEUE_GRAPHICS_BIT,
2798 "VUID-vkCmdClearAttachments-commandBuffer-cmdpool");
2799 skip |= ValidateCmd(device_data, cb_node, CMD_CLEARATTACHMENTS, "vkCmdClearAttachments()");
2800 // Warn if this is issued prior to Draw Cmd and clearing the entire attachment
2801 if (!cb_node->hasDrawCmd && (cb_node->activeRenderPassBeginInfo.renderArea.extent.width == pRects[0].rect.extent.width) &&
2802 (cb_node->activeRenderPassBeginInfo.renderArea.extent.height == pRects[0].rect.extent.height)) {
2803 // There are times when an app needs to use ClearAttachments (generally when reusing a buffer inside of a render pass)
2804 // This warning should be made more specific. It'd be best to avoid triggering this test if it's a use that must call
2805 // CmdClearAttachments.
2806 skip |=
2807 log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2808 HandleToUint64(commandBuffer), kVUID_Core_DrawState_ClearCmdBeforeDraw,
2809 "vkCmdClearAttachments() issued on command buffer object %s prior to any Draw Cmds. It is recommended you "
2810 "use RenderPass LOAD_OP_CLEAR on Attachments prior to any Draw.",
2811 report_data->FormatHandle(commandBuffer).c_str());
2812 }
2813 skip |= OutsideRenderPass(device_data, cb_node, "vkCmdClearAttachments()", "VUID-vkCmdClearAttachments-renderpass");
2814 }
2815
2816 // Validate that attachment is in reference list of active subpass
2817 if (cb_node && cb_node->activeRenderPass) {
2818 const VkRenderPassCreateInfo2KHR *renderpass_create_info = cb_node->activeRenderPass->createInfo.ptr();
2819 const uint32_t renderpass_attachment_count = renderpass_create_info->attachmentCount;
2820 const VkSubpassDescription2KHR *subpass_desc = &renderpass_create_info->pSubpasses[cb_node->activeSubpass];
2821 auto framebuffer = GetFramebufferState(cb_node->activeFramebuffer);
2822 const auto &render_area = cb_node->activeRenderPassBeginInfo.renderArea;
2823 std::shared_ptr<std::vector<VkClearRect>> clear_rect_copy;
2824
2825 for (uint32_t attachment_index = 0; attachment_index < attachmentCount; attachment_index++) {
2826 auto clear_desc = &pAttachments[attachment_index];
2827 uint32_t fb_attachment = VK_ATTACHMENT_UNUSED;
2828
2829 if (0 == clear_desc->aspectMask) {
2830 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2831 HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-requiredbitmask", " ");
2832 } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
2833 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2834 HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-00020", " ");
2835 } else if (clear_desc->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
2836 uint32_t color_attachment = VK_ATTACHMENT_UNUSED;
2837 if (clear_desc->colorAttachment < subpass_desc->colorAttachmentCount) {
2838 color_attachment = subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment;
2839 if ((color_attachment != VK_ATTACHMENT_UNUSED) && (color_attachment >= renderpass_attachment_count)) {
2840 skip |= log_msg(
2841 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2842 HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-aspectMask-02501",
2843 "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u is not VK_ATTACHMENT_UNUSED "
2844 "and not a valid attachment for render pass %s attachmentCount=%u. Subpass %u pColorAttachment[%u]=%u.",
2845                             attachment_index, clear_desc->colorAttachment,
2846                             report_data->FormatHandle(cb_node->activeRenderPass->renderPass).c_str(),
2847                             renderpass_attachment_count, cb_node->activeSubpass, clear_desc->colorAttachment, color_attachment);
2848
2849 color_attachment = VK_ATTACHMENT_UNUSED; // Defensive, prevent lookup past end of renderpass attachment
2850 }
2851 } else {
2852 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2853 HandleToUint64(commandBuffer), "VUID-vkCmdClearAttachments-aspectMask-02501",
2854 "vkCmdClearAttachments() pAttachments[%u].colorAttachment=%u out of range for render pass %s"
2855 " subpass %u. colorAttachmentCount=%u",
2856 attachment_index, clear_desc->colorAttachment,
2857 report_data->FormatHandle(cb_node->activeRenderPass->renderPass).c_str(),
2858 cb_node->activeSubpass, subpass_desc->colorAttachmentCount);
2859 }
2860 fb_attachment = color_attachment;
2861
2862 if ((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) ||
2863 (clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
2864 char const str[] =
2865 "vkCmdClearAttachments() aspectMask [%d] must set only VK_IMAGE_ASPECT_COLOR_BIT of a color attachment.";
2866 skip |=
2867 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2868 HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-00019", str, attachment_index);
2869 }
2870 } else { // Must be depth and/or stencil
2871 if (((clear_desc->aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) &&
2872 ((clear_desc->aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) {
2873 char const str[] = "vkCmdClearAttachments() aspectMask [%d] is not a valid combination of bits.";
2874 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2875 HandleToUint64(commandBuffer), "VUID-VkClearAttachment-aspectMask-parameter", str,
2876 attachment_index);
2877 }
2878 if (!subpass_desc->pDepthStencilAttachment ||
2879 (subpass_desc->pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED)) {
2880 skip |= log_msg(
2881 report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2882 HandleToUint64(commandBuffer), kVUID_Core_DrawState_MissingAttachmentReference,
2883 "vkCmdClearAttachments() depth/stencil clear with no depth/stencil attachment in subpass; ignored");
2884 } else {
2885 fb_attachment = subpass_desc->pDepthStencilAttachment->attachment;
2886 }
2887 }
2888 if (cb_node->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
2889 skip |= ValidateClearAttachmentExtent(device_data, commandBuffer, attachment_index, framebuffer, fb_attachment,
2890 render_area, rectCount, pRects);
2891 } else {
2892                 // If a secondary command buffer inherits the framebuffer from the primary command buffer (see
2893                 // VkCommandBufferInheritanceInfo), this validation must be deferred until vkCmdExecuteCommands time
2894 if (!clear_rect_copy) {
2895 // We need a copy of the clear rectangles that will persist until the last lambda executes
2896 // but we want to create it as lazily as possible
2897 clear_rect_copy.reset(new std::vector<VkClearRect>(pRects, pRects + rectCount));
2898 }
2899
2900 auto val_fn = [device_data, commandBuffer, attachment_index, fb_attachment, rectCount, clear_rect_copy](
2901 GLOBAL_CB_NODE *prim_cb, VkFramebuffer fb) {
2902 assert(rectCount == clear_rect_copy->size());
2903 FRAMEBUFFER_STATE *framebuffer = device_data->GetFramebufferState(fb);
2904 const auto &render_area = prim_cb->activeRenderPassBeginInfo.renderArea;
2905 bool skip = false;
2906 skip =
2907 device_data->ValidateClearAttachmentExtent(device_data, commandBuffer, attachment_index, framebuffer,
2908 fb_attachment, render_area, rectCount, clear_rect_copy->data());
2909 return skip;
2910 };
2911 cb_node->cmd_execute_commands_functions.emplace_back(val_fn);
2912 }
2913 }
2914 }
2915 return skip;
2916 }
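// Illustrative sketch (not part of the layer, kept in a comment): an application-side vkCmdClearAttachments call
// that satisfies the checks above -- a non-zero aspectMask without METADATA, a colorAttachment index valid for the
// active subpass, and a clear rect inside the render area. cmd_buf and the 640x480 extent are assumptions made
// only for this example.
/*
    VkClearAttachment clear_att = {};
    clear_att.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;        // must not be 0 and must not include METADATA
    clear_att.colorAttachment = 0;                           // must be < the subpass colorAttachmentCount
    clear_att.clearValue.color = {{0.0f, 0.0f, 0.0f, 1.0f}};

    VkClearRect clear_rect = {};
    clear_rect.rect.offset = {0, 0};
    clear_rect.rect.extent = {640, 480};                     // kept within the render area to pass extent validation
    clear_rect.baseArrayLayer = 0;
    clear_rect.layerCount = 1;                               // must be non-zero

    vkCmdClearAttachments(cmd_buf, 1, &clear_att, 1, &clear_rect);
*/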
2917
2918 bool CoreChecks::PreCallValidateCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2919 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2920 const VkImageResolve *pRegions) {
2921 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
2922 auto cb_node = GetCBNode(commandBuffer);
2923 auto src_image_state = GetImageState(srcImage);
2924 auto dst_image_state = GetImageState(dstImage);
2925
2926 bool skip = false;
2927 if (cb_node && src_image_state && dst_image_state) {
2928 skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdResolveImage()",
2929 "VUID-vkCmdResolveImage-srcImage-00256");
2930 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdResolveImage()",
2931 "VUID-vkCmdResolveImage-dstImage-00258");
2932 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdResolveImage()", VK_QUEUE_GRAPHICS_BIT,
2933 "VUID-vkCmdResolveImage-commandBuffer-cmdpool");
2934 skip |= ValidateCmd(device_data, cb_node, CMD_RESOLVEIMAGE, "vkCmdResolveImage()");
2935 skip |= InsideRenderPass(device_data, cb_node, "vkCmdResolveImage()", "VUID-vkCmdResolveImage-renderpass");
2936 skip |= ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
2937 "vkCmdResolveImage()", "VUID-vkCmdResolveImage-dstImage-02003",
2938 "VUID-vkCmdResolveImage-dstImage-02003");
2939
2940 bool hit_error = false;
2941 const char *invalid_src_layout_vuid =
2942 (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
2943 ? "VUID-vkCmdResolveImage-srcImageLayout-01400"
2944 : "VUID-vkCmdResolveImage-srcImageLayout-00261";
2945 const char *invalid_dst_layout_vuid =
2946 (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
2947 ? "VUID-vkCmdResolveImage-dstImageLayout-01401"
2948 : "VUID-vkCmdResolveImage-dstImageLayout-00263";
2949 // For each region, the number of layers in the image subresource should not be zero
2950 // For each region, src and dest image aspect must be color only
2951 for (uint32_t i = 0; i < regionCount; i++) {
2952 skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].srcSubresource, "vkCmdResolveImage()",
2953 "srcSubresource", i);
2954 skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].dstSubresource, "vkCmdResolveImage()",
2955 "dstSubresource", i);
2956 skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout,
2957 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdResolveImage()", invalid_src_layout_vuid,
2958 "VUID-vkCmdResolveImage-srcImageLayout-00260", &hit_error);
2959 skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout,
2960 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdResolveImage()", invalid_dst_layout_vuid,
2961 "VUID-vkCmdResolveImage-dstImageLayout-00262", &hit_error);
2962 skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, pRegions[i].srcSubresource.mipLevel, i,
2963 "vkCmdResolveImage()", "srcSubresource", "VUID-vkCmdResolveImage-srcSubresource-01709");
2964 skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource.mipLevel, i,
2965 "vkCmdResolveImage()", "dstSubresource", "VUID-vkCmdResolveImage-dstSubresource-01710");
2966 skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, pRegions[i].srcSubresource.baseArrayLayer,
2967 pRegions[i].srcSubresource.layerCount, i, "vkCmdResolveImage()", "srcSubresource",
2968 "VUID-vkCmdResolveImage-srcSubresource-01711");
2969 skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource.baseArrayLayer,
2970                                                  pRegions[i].dstSubresource.layerCount, i, "vkCmdResolveImage()", "dstSubresource",
2971 "VUID-vkCmdResolveImage-dstSubresource-01712");
2972
2973 // layer counts must match
2974 if (pRegions[i].srcSubresource.layerCount != pRegions[i].dstSubresource.layerCount) {
2975 skip |= log_msg(
2976 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2977 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageResolve-layerCount-00267",
2978 "vkCmdResolveImage(): layerCount in source and destination subresource of pRegions[%d] does not match.", i);
2979 }
2980 // For each region, src and dest image aspect must be color only
2981 if ((pRegions[i].srcSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) ||
2982 (pRegions[i].dstSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT)) {
2983 char const str[] =
2984 "vkCmdResolveImage(): src and dest aspectMasks for each region must specify only VK_IMAGE_ASPECT_COLOR_BIT";
2985 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2986 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageResolve-aspectMask-00266", "%s.", str);
2987 }
2988 }
2989
2990 if (src_image_state->createInfo.format != dst_image_state->createInfo.format) {
2991 char const str[] = "vkCmdResolveImage called with unmatched source and dest formats.";
2992 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2993 HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_MismatchedImageFormat, str);
2994 }
2995 if (src_image_state->createInfo.imageType != dst_image_state->createInfo.imageType) {
2996 char const str[] = "vkCmdResolveImage called with unmatched source and dest image types.";
2997 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
2998 HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_MismatchedImageType, str);
2999 }
3000 if (src_image_state->createInfo.samples == VK_SAMPLE_COUNT_1_BIT) {
3001 char const str[] = "vkCmdResolveImage called with source sample count less than 2.";
3002 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3003 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdResolveImage-srcImage-00257", "%s.", str);
3004 }
3005 if (dst_image_state->createInfo.samples != VK_SAMPLE_COUNT_1_BIT) {
3006 char const str[] = "vkCmdResolveImage called with dest sample count greater than 1.";
3007 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3008 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdResolveImage-dstImage-00259", "%s.", str);
3009 }
3010 } else {
3011 assert(0);
3012 }
3013 return skip;
3014 }
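// Illustrative sketch (not part of the layer, kept in a comment): a VkImageResolve region consistent with the
// per-region checks above -- color-only aspect masks and matching layerCount -- resolving a multisampled image
// into a single-sample image of the same format. cmd_buf, msaa_image, resolve_image, width and height are
// assumptions made only for this example.
/*
    VkImageResolve region = {};
    region.srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};  // aspect must be COLOR only
    region.dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};  // layerCount must match srcSubresource
    region.srcOffset = {0, 0, 0};
    region.dstOffset = {0, 0, 0};
    region.extent = {width, height, 1};

    vkCmdResolveImage(cmd_buf, msaa_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resolve_image,
                      VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
*/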
3015
3016 void CoreChecks::PreCallRecordCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3017 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3018 const VkImageResolve *pRegions) {
3019 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
3020 auto cb_node = GetCBNode(commandBuffer);
3021 auto src_image_state = GetImageState(srcImage);
3022 auto dst_image_state = GetImageState(dstImage);
3023
3024 // Update bindings between images and cmd buffer
3025 AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
3026 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
3027 }
3028
3029 bool CoreChecks::PreCallValidateCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3030 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3031 const VkImageBlit *pRegions, VkFilter filter) {
3032 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
3033 auto cb_node = GetCBNode(commandBuffer);
3034 auto src_image_state = GetImageState(srcImage);
3035 auto dst_image_state = GetImageState(dstImage);
3036
3037 bool skip = false;
3038 if (cb_node) {
3039 skip |= ValidateCmd(device_data, cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
3040 }
3041 if (cb_node && src_image_state && dst_image_state) {
3042 skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): srcImage",
3043 "VUID-vkCmdBlitImage-srcImage-00233");
3044 skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdBlitImage(): dstImage",
3045 "VUID-vkCmdBlitImage-dstImage-00234");
3046 skip |=
3047 ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-srcImage-00220");
3048 skip |=
3049 ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-dstImage-00225");
3050 skip |=
3051 ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
3052 "VUID-vkCmdBlitImage-srcImage-00219", "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
3053 skip |=
3054 ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
3055 "VUID-vkCmdBlitImage-dstImage-00224", "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
3056 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdBlitImage()", VK_QUEUE_GRAPHICS_BIT,
3057 "VUID-vkCmdBlitImage-commandBuffer-cmdpool");
3058 skip |= ValidateCmd(device_data, cb_node, CMD_BLITIMAGE, "vkCmdBlitImage()");
3059 skip |= InsideRenderPass(device_data, cb_node, "vkCmdBlitImage()", "VUID-vkCmdBlitImage-renderpass");
3060 skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_BLIT_SRC_BIT, "vkCmdBlitImage()",
3061 "VUID-vkCmdBlitImage-srcImage-01999", "VUID-vkCmdBlitImage-srcImage-01999");
3062 skip |= ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_BLIT_DST_BIT, "vkCmdBlitImage()",
3063 "VUID-vkCmdBlitImage-dstImage-02000", "VUID-vkCmdBlitImage-dstImage-02000");
3064
3065 // TODO: Need to validate image layouts, which will include layout validation for shared presentable images
3066
3067 VkFormat src_format = src_image_state->createInfo.format;
3068 VkFormat dst_format = dst_image_state->createInfo.format;
3069 VkImageType src_type = src_image_state->createInfo.imageType;
3070 VkImageType dst_type = dst_image_state->createInfo.imageType;
3071
3072 if (VK_FILTER_LINEAR == filter) {
3073 skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
3074 "vkCmdBlitImage()", "VUID-vkCmdBlitImage-filter-02001",
3075 "VUID-vkCmdBlitImage-filter-02001");
3076 } else if (VK_FILTER_CUBIC_IMG == filter) {
3077 skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state,
3078 VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, "vkCmdBlitImage()",
3079 "VUID-vkCmdBlitImage-filter-02002", "VUID-vkCmdBlitImage-filter-02002");
3080 }
3081
3082 if ((VK_FILTER_CUBIC_IMG == filter) && (VK_IMAGE_TYPE_3D != src_type)) {
3083 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3084 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-filter-00237",
3085 "vkCmdBlitImage(): source image type must be VK_IMAGE_TYPE_3D when cubic filtering is specified.");
3086 }
3087
3088 if ((VK_SAMPLE_COUNT_1_BIT != src_image_state->createInfo.samples) ||
3089 (VK_SAMPLE_COUNT_1_BIT != dst_image_state->createInfo.samples)) {
3090 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3091 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00228",
3092 "vkCmdBlitImage(): source or dest image has sample count other than VK_SAMPLE_COUNT_1_BIT.");
3093 }
3094
3095 // Validate consistency for unsigned formats
3096 if (FormatIsUInt(src_format) != FormatIsUInt(dst_format)) {
3097 std::stringstream ss;
3098 ss << "vkCmdBlitImage(): If one of srcImage and dstImage images has unsigned integer format, "
3099 << "the other one must also have unsigned integer format. "
3100 << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format);
3101 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3102 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00230", "%s.", ss.str().c_str());
3103 }
3104
3105 // Validate consistency for signed formats
3106 if (FormatIsSInt(src_format) != FormatIsSInt(dst_format)) {
3107 std::stringstream ss;
3108 ss << "vkCmdBlitImage(): If one of srcImage and dstImage images has signed integer format, "
3109 << "the other one must also have signed integer format. "
3110 << "Source format is " << string_VkFormat(src_format) << " Destination format is " << string_VkFormat(dst_format);
3111 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3112 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00229", "%s.", ss.str().c_str());
3113 }
3114
3115 // Validate filter for Depth/Stencil formats
3116 if (FormatIsDepthOrStencil(src_format) && (filter != VK_FILTER_NEAREST)) {
3117 std::stringstream ss;
3118 ss << "vkCmdBlitImage(): If the format of srcImage is a depth, stencil, or depth stencil "
3119 << "then filter must be VK_FILTER_NEAREST.";
3120 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3121 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00232", "%s.", ss.str().c_str());
3122 }
3123
3124 // Validate aspect bits and formats for depth/stencil images
3125 if (FormatIsDepthOrStencil(src_format) || FormatIsDepthOrStencil(dst_format)) {
3126 if (src_format != dst_format) {
3127 std::stringstream ss;
3128 ss << "vkCmdBlitImage(): If one of srcImage and dstImage images has a format of depth, stencil or depth "
3129 << "stencil, the other one must have exactly the same format. "
3130 << "Source format is " << string_VkFormat(src_format) << " Destination format is "
3131 << string_VkFormat(dst_format);
3132 skip |=
3133 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3134 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-srcImage-00231", "%s.", ss.str().c_str());
3135 }
3136 } // Depth or Stencil
3137
3138 // Do per-region checks
3139 const char *invalid_src_layout_vuid =
3140 (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
3141 ? "VUID-vkCmdBlitImage-srcImageLayout-01398"
3142 : "VUID-vkCmdBlitImage-srcImageLayout-00222";
3143 const char *invalid_dst_layout_vuid =
3144 (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
3145 ? "VUID-vkCmdBlitImage-dstImageLayout-01399"
3146 : "VUID-vkCmdBlitImage-dstImageLayout-00227";
3147 for (uint32_t i = 0; i < regionCount; i++) {
3148 const VkImageBlit rgn = pRegions[i];
3149 bool hit_error = false;
3150 skip |= VerifyImageLayout(device_data, cb_node, src_image_state, rgn.srcSubresource, srcImageLayout,
3151 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdBlitImage()", invalid_src_layout_vuid,
3152 "VUID-vkCmdBlitImage-srcImageLayout-00221", &hit_error);
3153 skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, rgn.dstSubresource, dstImageLayout,
3154 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdBlitImage()", invalid_dst_layout_vuid,
3155 "VUID-vkCmdBlitImage-dstImageLayout-00226", &hit_error);
3156 skip |=
3157 ValidateImageSubresourceLayers(device_data, cb_node, &rgn.srcSubresource, "vkCmdBlitImage()", "srcSubresource", i);
3158 skip |=
3159 ValidateImageSubresourceLayers(device_data, cb_node, &rgn.dstSubresource, "vkCmdBlitImage()", "dstSubresource", i);
3160 skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, rgn.srcSubresource.mipLevel, i, "vkCmdBlitImage()",
3161 "srcSubresource", "VUID-vkCmdBlitImage-srcSubresource-01705");
3162 skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, rgn.dstSubresource.mipLevel, i, "vkCmdBlitImage()",
3163 "dstSubresource", "VUID-vkCmdBlitImage-dstSubresource-01706");
3164 skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, rgn.srcSubresource.baseArrayLayer,
3165 rgn.srcSubresource.layerCount, i, "vkCmdBlitImage()", "srcSubresource",
3166 "VUID-vkCmdBlitImage-srcSubresource-01707");
3167 skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, rgn.dstSubresource.baseArrayLayer,
3168 rgn.dstSubresource.layerCount, i, "vkCmdBlitImage()", "dstSubresource",
3169 "VUID-vkCmdBlitImage-dstSubresource-01708");
3170 // Warn for zero-sized regions
3171 if ((rgn.srcOffsets[0].x == rgn.srcOffsets[1].x) || (rgn.srcOffsets[0].y == rgn.srcOffsets[1].y) ||
3172 (rgn.srcOffsets[0].z == rgn.srcOffsets[1].z)) {
3173 std::stringstream ss;
3174 ss << "vkCmdBlitImage(): pRegions[" << i << "].srcOffsets specify a zero-volume area.";
3175 skip |=
3176 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3177 HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_InvalidExtents, "%s", ss.str().c_str());
3178 }
3179 if ((rgn.dstOffsets[0].x == rgn.dstOffsets[1].x) || (rgn.dstOffsets[0].y == rgn.dstOffsets[1].y) ||
3180 (rgn.dstOffsets[0].z == rgn.dstOffsets[1].z)) {
3181 std::stringstream ss;
3182 ss << "vkCmdBlitImage(): pRegions[" << i << "].dstOffsets specify a zero-volume area.";
3183 skip |=
3184 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3185 HandleToUint64(cb_node->commandBuffer), kVUID_Core_DrawState_InvalidExtents, "%s", ss.str().c_str());
3186 }
3187
3188 // Check that src/dst layercounts match
3189 if (rgn.srcSubresource.layerCount != rgn.dstSubresource.layerCount) {
3190 skip |= log_msg(
3191 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3192 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-layerCount-00239",
3193 "vkCmdBlitImage(): layerCount in source and destination subresource of pRegions[%d] does not match.", i);
3194 }
3195
3196 if (rgn.srcSubresource.aspectMask != rgn.dstSubresource.aspectMask) {
3197 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3198 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-aspectMask-00238",
3199 "vkCmdBlitImage(): aspectMask members for pRegion[%d] do not match.", i);
3200 }
3201
3202 if (!VerifyAspectsPresent(rgn.srcSubresource.aspectMask, src_format)) {
3203 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3204 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-aspectMask-00241",
3205 "vkCmdBlitImage(): region [%d] source aspectMask (0x%x) specifies aspects not present in source "
3206 "image format %s.",
3207 i, rgn.srcSubresource.aspectMask, string_VkFormat(src_format));
3208 }
3209
3210 if (!VerifyAspectsPresent(rgn.dstSubresource.aspectMask, dst_format)) {
3211 skip |= log_msg(
3212 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3213 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-aspectMask-00242",
3214 "vkCmdBlitImage(): region [%d] dest aspectMask (0x%x) specifies aspects not present in dest image format %s.",
3215 i, rgn.dstSubresource.aspectMask, string_VkFormat(dst_format));
3216 }
3217
3218 // Validate source image offsets
3219 VkExtent3D src_extent = GetImageSubresourceExtent(src_image_state, &(rgn.srcSubresource));
3220 if (VK_IMAGE_TYPE_1D == src_type) {
3221 if ((0 != rgn.srcOffsets[0].y) || (1 != rgn.srcOffsets[1].y)) {
3222 skip |=
3223 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3224 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcImage-00245",
3225 "vkCmdBlitImage(): region [%d], source image of type VK_IMAGE_TYPE_1D with srcOffset[].y values "
3226 "of (%1d, %1d). These must be (0, 1).",
3227 i, rgn.srcOffsets[0].y, rgn.srcOffsets[1].y);
3228 }
3229 }
3230
3231 if ((VK_IMAGE_TYPE_1D == src_type) || (VK_IMAGE_TYPE_2D == src_type)) {
3232 if ((0 != rgn.srcOffsets[0].z) || (1 != rgn.srcOffsets[1].z)) {
3233 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3234 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcImage-00247",
3235 "vkCmdBlitImage(): region [%d], source image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with "
3236 "srcOffset[].z values of (%1d, %1d). These must be (0, 1).",
3237 i, rgn.srcOffsets[0].z, rgn.srcOffsets[1].z);
3238 }
3239 }
3240
3241 bool oob = false;
3242 if ((rgn.srcOffsets[0].x < 0) || (rgn.srcOffsets[0].x > static_cast<int32_t>(src_extent.width)) ||
3243 (rgn.srcOffsets[1].x < 0) || (rgn.srcOffsets[1].x > static_cast<int32_t>(src_extent.width))) {
3244 oob = true;
3245 skip |= log_msg(
3246 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3247 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcOffset-00243",
3248 "vkCmdBlitImage(): region [%d] srcOffset[].x values (%1d, %1d) exceed srcSubresource width extent (%1d).", i,
3249 rgn.srcOffsets[0].x, rgn.srcOffsets[1].x, src_extent.width);
3250 }
3251 if ((rgn.srcOffsets[0].y < 0) || (rgn.srcOffsets[0].y > static_cast<int32_t>(src_extent.height)) ||
3252 (rgn.srcOffsets[1].y < 0) || (rgn.srcOffsets[1].y > static_cast<int32_t>(src_extent.height))) {
3253 oob = true;
3254 skip |= log_msg(
3255 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3256 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcOffset-00244",
3257 "vkCmdBlitImage(): region [%d] srcOffset[].y values (%1d, %1d) exceed srcSubresource height extent (%1d).", i,
3258 rgn.srcOffsets[0].y, rgn.srcOffsets[1].y, src_extent.height);
3259 }
3260 if ((rgn.srcOffsets[0].z < 0) || (rgn.srcOffsets[0].z > static_cast<int32_t>(src_extent.depth)) ||
3261 (rgn.srcOffsets[1].z < 0) || (rgn.srcOffsets[1].z > static_cast<int32_t>(src_extent.depth))) {
3262 oob = true;
3263 skip |= log_msg(
3264 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3265 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcOffset-00246",
3266 "vkCmdBlitImage(): region [%d] srcOffset[].z values (%1d, %1d) exceed srcSubresource depth extent (%1d).", i,
3267 rgn.srcOffsets[0].z, rgn.srcOffsets[1].z, src_extent.depth);
3268 }
3269 if (oob) {
3270 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3271 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-pRegions-00215",
3272 "vkCmdBlitImage(): region [%d] source image blit region exceeds image dimensions.", i);
3273 }
3274
3275 // Validate dest image offsets
3276 VkExtent3D dst_extent = GetImageSubresourceExtent(dst_image_state, &(rgn.dstSubresource));
3277 if (VK_IMAGE_TYPE_1D == dst_type) {
3278 if ((0 != rgn.dstOffsets[0].y) || (1 != rgn.dstOffsets[1].y)) {
3279 skip |=
3280 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3281 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstImage-00250",
3282 "vkCmdBlitImage(): region [%d], dest image of type VK_IMAGE_TYPE_1D with dstOffset[].y values of "
3283 "(%1d, %1d). These must be (0, 1).",
3284 i, rgn.dstOffsets[0].y, rgn.dstOffsets[1].y);
3285 }
3286 }
3287
3288 if ((VK_IMAGE_TYPE_1D == dst_type) || (VK_IMAGE_TYPE_2D == dst_type)) {
3289 if ((0 != rgn.dstOffsets[0].z) || (1 != rgn.dstOffsets[1].z)) {
3290 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3291 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstImage-00252",
3292 "vkCmdBlitImage(): region [%d], dest image of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D with "
3293 "dstOffset[].z values of (%1d, %1d). These must be (0, 1).",
3294 i, rgn.dstOffsets[0].z, rgn.dstOffsets[1].z);
3295 }
3296 }
3297
3298 oob = false;
3299 if ((rgn.dstOffsets[0].x < 0) || (rgn.dstOffsets[0].x > static_cast<int32_t>(dst_extent.width)) ||
3300 (rgn.dstOffsets[1].x < 0) || (rgn.dstOffsets[1].x > static_cast<int32_t>(dst_extent.width))) {
3301 oob = true;
3302 skip |= log_msg(
3303 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3304 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstOffset-00248",
3305 "vkCmdBlitImage(): region [%d] dstOffset[].x values (%1d, %1d) exceed dstSubresource width extent (%1d).", i,
3306 rgn.dstOffsets[0].x, rgn.dstOffsets[1].x, dst_extent.width);
3307 }
3308 if ((rgn.dstOffsets[0].y < 0) || (rgn.dstOffsets[0].y > static_cast<int32_t>(dst_extent.height)) ||
3309 (rgn.dstOffsets[1].y < 0) || (rgn.dstOffsets[1].y > static_cast<int32_t>(dst_extent.height))) {
3310 oob = true;
3311 skip |= log_msg(
3312 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3313 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstOffset-00249",
3314 "vkCmdBlitImage(): region [%d] dstOffset[].y values (%1d, %1d) exceed dstSubresource height extent (%1d).", i,
3315 rgn.dstOffsets[0].y, rgn.dstOffsets[1].y, dst_extent.height);
3316 }
3317 if ((rgn.dstOffsets[0].z < 0) || (rgn.dstOffsets[0].z > static_cast<int32_t>(dst_extent.depth)) ||
3318 (rgn.dstOffsets[1].z < 0) || (rgn.dstOffsets[1].z > static_cast<int32_t>(dst_extent.depth))) {
3319 oob = true;
3320 skip |= log_msg(
3321 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3322 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-dstOffset-00251",
3323 "vkCmdBlitImage(): region [%d] dstOffset[].z values (%1d, %1d) exceed dstSubresource depth extent (%1d).", i,
3324 rgn.dstOffsets[0].z, rgn.dstOffsets[1].z, dst_extent.depth);
3325 }
3326 if (oob) {
3327 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3328 HandleToUint64(cb_node->commandBuffer), "VUID-vkCmdBlitImage-pRegions-00216",
3329 "vkCmdBlitImage(): region [%d] destination image blit region exceeds image dimensions.", i);
3330 }
3331
3332 if ((VK_IMAGE_TYPE_3D == src_type) || (VK_IMAGE_TYPE_3D == dst_type)) {
3333 if ((0 != rgn.srcSubresource.baseArrayLayer) || (1 != rgn.srcSubresource.layerCount) ||
3334 (0 != rgn.dstSubresource.baseArrayLayer) || (1 != rgn.dstSubresource.layerCount)) {
3335 skip |=
3336 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3337 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageBlit-srcImage-00240",
3338 "vkCmdBlitImage(): region [%d] blit to/from a 3D image type with a non-zero baseArrayLayer, or a "
3339 "layerCount other than 1.",
3340 i);
3341 }
3342 }
3343 } // per-region checks
3344 } else {
3345 assert(0);
3346 }
3347 return skip;
3348 }
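// Illustrative sketch (not part of the layer, kept in a comment): a VkImageBlit region consistent with the
// per-region rules above -- matching aspectMask and layerCount, a non-zero volume, and offsets that stay inside
// both subresource extents. cmd_buf, src_img, dst_img and the width/height variables are assumptions made only
// for this example.
/*
    VkImageBlit blit = {};
    blit.srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    blit.dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};   // aspectMask and layerCount must match src
    blit.srcOffsets[0] = {0, 0, 0};
    blit.srcOffsets[1] = {static_cast<int32_t>(src_width), static_cast<int32_t>(src_height), 1};  // z offsets must be (0, 1) for 2D
    blit.dstOffsets[0] = {0, 0, 0};
    blit.dstOffsets[1] = {static_cast<int32_t>(dst_width), static_cast<int32_t>(dst_height), 1};

    vkCmdBlitImage(cmd_buf, src_img, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst_img,
                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &blit, VK_FILTER_LINEAR);
*/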
3349
3350 void CoreChecks::PreCallRecordCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
3351 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
3352 const VkImageBlit *pRegions, VkFilter filter) {
3353 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
3354 auto cb_node = GetCBNode(commandBuffer);
3355 auto src_image_state = GetImageState(srcImage);
3356 auto dst_image_state = GetImageState(dstImage);
3357
3358 // Make sure that all image slices are updated to correct layout
3359 for (uint32_t i = 0; i < regionCount; ++i) {
3360 SetImageLayout(device_data, cb_node, src_image_state, pRegions[i].srcSubresource, srcImageLayout);
3361 SetImageLayout(device_data, cb_node, dst_image_state, pRegions[i].dstSubresource, dstImageLayout);
3362 }
3363 // Update bindings between images and cmd buffer
3364 AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
3365 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
3366 }
3367
3368 // This validates that the initial layout specified in the command buffer for the IMAGE is the same as the global IMAGE layout
3369 bool CoreChecks::ValidateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB,
3370 std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> const &globalImageLayoutMap,
3371 std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> &overlayLayoutMap) {
3372 bool skip = false;
3373 for (auto cb_image_data : pCB->imageLayoutMap) {
3374 VkImageLayout imageLayout;
3375
3376 if (FindLayout(device_data, overlayLayoutMap, cb_image_data.first, imageLayout) ||
3377 FindLayout(device_data, globalImageLayoutMap, cb_image_data.first, imageLayout)) {
3378 if (cb_image_data.second.initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
3379 // TODO: Set memory invalid which is in mem_tracker currently
3380 } else if (imageLayout != cb_image_data.second.initialLayout) {
3381 if (cb_image_data.first.hasSubresource) {
3382 skip |=
3383 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3384 HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidImageLayout,
3385 "Submitted command buffer expects image %s (subresource: aspectMask 0x%X array layer %u, mip level "
3386 "%u) to be in layout %s--instead, image %s's current layout is %s.",
3387 report_data->FormatHandle(cb_image_data.first.image).c_str(),
3388 cb_image_data.first.subresource.aspectMask, cb_image_data.first.subresource.arrayLayer,
3389 cb_image_data.first.subresource.mipLevel, string_VkImageLayout(cb_image_data.second.initialLayout),
3390 report_data->FormatHandle(cb_image_data.first.image).c_str(), string_VkImageLayout(imageLayout));
3391 } else {
3392 skip |= log_msg(
3393 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3394 HandleToUint64(pCB->commandBuffer), kVUID_Core_DrawState_InvalidImageLayout,
3395 "Submitted command buffer expects image %s to be in layout %s--instead, image %s's current layout is %s.",
3396 report_data->FormatHandle(cb_image_data.first.image).c_str(),
3397 string_VkImageLayout(cb_image_data.second.initialLayout),
3398 report_data->FormatHandle(cb_image_data.first.image).c_str(), string_VkImageLayout(imageLayout));
3399 }
3400 }
3401 SetLayout(overlayLayoutMap, cb_image_data.first, cb_image_data.second.layout);
3402 }
3403 }
3404 return skip;
3405 }
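// Illustrative sketch (not part of the layer, kept in a comment): the mismatch reported above usually means a
// command buffer was submitted while the image was still in a different layout than the one it was recorded
// against. Recording a layout transition before the dependent commands keeps the two in sync. cmd_buf and image
// are assumptions made only for this example.
/*
    VkImageMemoryBarrier barrier = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER};
    barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;              // the image's actual current layout
    barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;   // the layout the following commands expect
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    barrier.srcAccessMask = 0;
    barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

    vkCmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                         0, nullptr, 0, nullptr, 1, &barrier);
*/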
3406
3407 void CoreChecks::UpdateCmdBufImageLayouts(layer_data *device_data, GLOBAL_CB_NODE *pCB) {
3408 for (auto cb_image_data : pCB->imageLayoutMap) {
3409 VkImageLayout imageLayout;
3410 FindGlobalLayout(device_data, cb_image_data.first, imageLayout);
3411 SetGlobalLayout(device_data, cb_image_data.first, cb_image_data.second.layout);
3412 }
3413 }
3414
3415 // ValidateLayoutVsAttachmentDescription is a general function where we can validate various state associated with the
3416 // VkAttachmentDescription structs that are used by the sub-passes of a renderpass. Initial check is to make sure that READ_ONLY
3417 // layout attachments don't have CLEAR as their loadOp.
3418 bool CoreChecks::ValidateLayoutVsAttachmentDescription(const debug_report_data *report_data, RenderPassCreateVersion rp_version,
3419 const VkImageLayout first_layout, const uint32_t attachment,
3420 const VkAttachmentDescription2KHR &attachment_description) {
3421 bool skip = false;
3422 const char *vuid;
3423 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
3424
3425 // Verify that initial loadOp on READ_ONLY attachments is not CLEAR
3426 if (attachment_description.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
3427 if (use_rp2 && ((first_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) ||
3428 (first_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) ||
3429 (first_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL))) {
3430 skip |=
3431 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3432 "VUID-VkRenderPassCreateInfo2KHR-pAttachments-02522",
3433 "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
3434 } else if (!use_rp2 && ((first_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) ||
3435 (first_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))) {
3436 skip |=
3437 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3438 "VUID-VkRenderPassCreateInfo-pAttachments-00836",
3439 "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
3440 }
3441 }
3442 if (attachment_description.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
3443 if (first_layout == VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL) {
3444 vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pAttachments-01566";
3445 skip |=
3446 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
3447 "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
3448 }
3449 }
3450
3451 if (attachment_description.stencilLoadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
3452 if (first_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL) {
3453 vuid = use_rp2 ? kVUID_Core_DrawState_InvalidRenderpass : "VUID-VkRenderPassCreateInfo-pAttachments-01567";
3454 skip |=
3455 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
3456 "Cannot clear attachment %d with invalid first layout %s.", attachment, string_VkImageLayout(first_layout));
3457 }
3458 }
3459 return skip;
3460 }
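// Illustrative sketch (not part of the layer, kept in a comment): the check above compares an attachment's loadOp
// against the layout it has at its first use in a subpass. For a depth/stencil attachment with
// loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR:
/*
    // Flagged: the cleared attachment is first referenced in a read-only layout
    VkAttachmentReference bad_ref  = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL};
    // Passes: the cleared attachment is first referenced in a writable layout
    VkAttachmentReference good_ref = {0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
*/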
3461
3462 bool CoreChecks::ValidateLayouts(layer_data *device_data, RenderPassCreateVersion rp_version, VkDevice device,
3463 const VkRenderPassCreateInfo2KHR *pCreateInfo) {
3464 bool skip = false;
3465 const char *vuid;
3466 const bool use_rp2 = (rp_version == RENDER_PASS_VERSION_2);
3467 const char *const function_name = use_rp2 ? "vkCreateRenderPass2KHR()" : "vkCreateRenderPass()";
3468
3469 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
3470 VkFormat format = pCreateInfo->pAttachments[i].format;
3471 if (pCreateInfo->pAttachments[i].initialLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
3472 if ((FormatIsColor(format) || FormatHasDepth(format)) &&
3473 pCreateInfo->pAttachments[i].loadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
3474 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3475 kVUID_Core_DrawState_InvalidRenderpass,
3476 "Render pass has an attachment with loadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout == "
3477 "VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you intended. Consider using "
3478                                 "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truly is undefined at the start of the "
3479 "render pass.");
3480 }
3481 if (FormatHasStencil(format) && pCreateInfo->pAttachments[i].stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD) {
3482 skip |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3483 kVUID_Core_DrawState_InvalidRenderpass,
3484 "Render pass has an attachment with stencilLoadOp == VK_ATTACHMENT_LOAD_OP_LOAD and initialLayout "
3485 "== VK_IMAGE_LAYOUT_UNDEFINED. This is probably not what you intended. Consider using "
3486                                 "VK_ATTACHMENT_LOAD_OP_DONT_CARE instead if the image truly is undefined at the start of the "
3487 "render pass.");
3488 }
3489 }
3490 }
3491
3492 // Track when we're observing the first use of an attachment
3493 std::vector<bool> attach_first_use(pCreateInfo->attachmentCount, true);
3494 for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
3495 const VkSubpassDescription2KHR &subpass = pCreateInfo->pSubpasses[i];
3496
3497 // Check input attachments first, so we can detect first-use-as-input for VU #00349
3498 for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
3499 auto attach_index = subpass.pInputAttachments[j].attachment;
3500 if (attach_index == VK_ATTACHMENT_UNUSED) continue;
3501 switch (subpass.pInputAttachments[j].layout) {
3502 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
3503 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
3504 // These are ideal.
3505 break;
3506
3507 case VK_IMAGE_LAYOUT_GENERAL:
3508 // May not be optimal. TODO: reconsider this warning based on other constraints.
3509 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
3510 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_DrawState_InvalidImageLayout,
3511 "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
3512 break;
3513
3514 case VK_IMAGE_LAYOUT_UNDEFINED:
3515 case VK_IMAGE_LAYOUT_PREINITIALIZED:
3516 vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
3517 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
3518 "Layout for input attachment reference %u in subpass %u is %s but must be "
3519 "DEPTH_STENCIL_READ_ONLY, SHADER_READ_ONLY_OPTIMAL, or GENERAL.",
3520                                     j, i, string_VkImageLayout(subpass.pInputAttachments[j].layout));
3521 break;
3522
3523 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
3524 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR:
3525 if (GetDeviceExtensions()->vk_khr_maintenance2) {
3526 break;
3527 } else {
3528 // Intentionally fall through to generic error message
3529 }
3530 // fall through
3531
3532 default:
3533 // No other layouts are acceptable
3534 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3535 kVUID_Core_DrawState_InvalidImageLayout,
3536 "Layout for input attachment is %s but can only be READ_ONLY_OPTIMAL or GENERAL.",
3537 string_VkImageLayout(subpass.pInputAttachments[j].layout));
3538 }
3539
3540 if (attach_first_use[attach_index]) {
3541 skip |= ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pInputAttachments[j].layout,
3542 attach_index, pCreateInfo->pAttachments[attach_index]);
3543
3544 bool used_as_depth =
3545 (subpass.pDepthStencilAttachment != NULL && subpass.pDepthStencilAttachment->attachment == attach_index);
3546 bool used_as_color = false;
3547 for (uint32_t k = 0; !used_as_depth && !used_as_color && k < subpass.colorAttachmentCount; ++k) {
3548 used_as_color = (subpass.pColorAttachments[k].attachment == attach_index);
3549 }
3550 if (!used_as_depth && !used_as_color &&
3551 pCreateInfo->pAttachments[attach_index].loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR) {
3552 vuid = use_rp2 ? "VUID-VkSubpassDescription2KHR-loadOp-03064" : "VUID-VkSubpassDescription-loadOp-00846";
3553 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
3554 "%s: attachment %u is first used as an input attachment in subpass %u with loadOp=CLEAR.",
3555                                     function_name, attach_index, i);
3556 }
3557 }
3558 attach_first_use[attach_index] = false;
3559 }
3560
3561 for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
3562 auto attach_index = subpass.pColorAttachments[j].attachment;
3563 if (attach_index == VK_ATTACHMENT_UNUSED) continue;
3564
3565 // TODO: Need a way to validate shared presentable images here, currently just allowing
3566 // VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR
3567 // as an acceptable layout, but need to make sure shared presentable images ONLY use that layout
3568 switch (subpass.pColorAttachments[j].layout) {
3569 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
3570 // This is ideal.
3571 case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
3572 // TODO: See note above, just assuming that attachment is shared presentable and allowing this for now.
3573 break;
3574
3575 case VK_IMAGE_LAYOUT_GENERAL:
3576 // May not be optimal; TODO: reconsider this warning based on other constraints?
3577 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
3578 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_DrawState_InvalidImageLayout,
3579 "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
3580 break;
3581
3582 case VK_IMAGE_LAYOUT_UNDEFINED:
3583 case VK_IMAGE_LAYOUT_PREINITIALIZED:
3584 vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
3585 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
3586 "Layout for color attachment reference %u in subpass %u is %s but should be "
3587 "COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
3588 j, i, string_VkImageLayout(subpass.pColorAttachments[j].layout));
3589 break;
3590
3591 default:
3592 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3593 kVUID_Core_DrawState_InvalidImageLayout,
3594 "Layout for color attachment is %s but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
3595 string_VkImageLayout(subpass.pColorAttachments[j].layout));
3596 }
3597
3598 if (subpass.pResolveAttachments && (subpass.pResolveAttachments[j].attachment != VK_ATTACHMENT_UNUSED) &&
3599 (subpass.pResolveAttachments[j].layout == VK_IMAGE_LAYOUT_UNDEFINED ||
3600 subpass.pResolveAttachments[j].layout == VK_IMAGE_LAYOUT_PREINITIALIZED)) {
3601 vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
3602 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
3603 "Layout for resolve attachment reference %u in subpass %u is %s but should be "
3604 "COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
3605 j, i, string_VkImageLayout(subpass.pResolveAttachments[j].layout));
3606 }
3607
3608 if (attach_first_use[attach_index]) {
3609 skip |= ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pColorAttachments[j].layout,
3610 attach_index, pCreateInfo->pAttachments[attach_index]);
3611 }
3612 attach_first_use[attach_index] = false;
3613 }
3614
3615 if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
3616 switch (subpass.pDepthStencilAttachment->layout) {
3617 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
3618 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
3619 // These are ideal.
3620 break;
3621
3622 case VK_IMAGE_LAYOUT_GENERAL:
3623 // May not be optimal; TODO: reconsider this warning based on other constraints? GENERAL can be better than
3624 // doing a bunch of transitions.
3625 skip |= log_msg(report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
3626 VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, kVUID_Core_DrawState_InvalidImageLayout,
3627 "GENERAL layout for depth attachment may not give optimal performance.");
3628 break;
3629
3630 case VK_IMAGE_LAYOUT_UNDEFINED:
3631 case VK_IMAGE_LAYOUT_PREINITIALIZED:
3632 vuid = use_rp2 ? "VUID-VkAttachmentReference2KHR-layout-03077" : "VUID-VkAttachmentReference-layout-00857";
3633 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, vuid,
3634 "Layout for depth attachment reference in subpass %u is %s but must be a valid depth/stencil "
3635 "layout or GENERAL.",
3636 i, string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
3637 break;
3638
3639 case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR:
3640 case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR:
3641 if (GetDeviceExtensions()->vk_khr_maintenance2) {
3642 break;
3643 } else {
3644 // Intentionally fall through to generic error message
3645 }
3646 // fall through
3647
3648 default:
3649 // No other layouts are acceptable
3650 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3651 kVUID_Core_DrawState_InvalidImageLayout,
3652 "Layout for depth attachment is %s but can only be DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
3653 "DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.",
3654 string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
3655 }
3656
3657 auto attach_index = subpass.pDepthStencilAttachment->attachment;
3658 if (attach_first_use[attach_index]) {
3659 skip |= ValidateLayoutVsAttachmentDescription(report_data, rp_version, subpass.pDepthStencilAttachment->layout,
3660 attach_index, pCreateInfo->pAttachments[attach_index]);
3661 }
3662 attach_first_use[attach_index] = false;
3663 }
3664 }
3665 return skip;
3666 }
3667
3668 // For any image objects that overlap mapped memory, verify that their layouts are PREINIT or GENERAL
3669 bool CoreChecks::ValidateMapImageLayouts(layer_data *device_data, VkDevice device, DEVICE_MEM_INFO const *mem_info,
3670 VkDeviceSize offset, VkDeviceSize end_offset) {
3671 bool skip = false;
3672 // Iterate over all bound image ranges and verify that for any that overlap the map ranges, the layouts are
3673 // VK_IMAGE_LAYOUT_PREINITIALIZED or VK_IMAGE_LAYOUT_GENERAL
3674 // TODO : This can be optimized if we store ranges based on starting address and early exit when we pass our range
3675 for (auto image_handle : mem_info->bound_images) {
3676 auto img_it = mem_info->bound_ranges.find(image_handle);
3677 if (img_it != mem_info->bound_ranges.end()) {
3678 if (RangesIntersect(device_data, &img_it->second, offset, end_offset)) {
3679 std::vector<VkImageLayout> layouts;
3680 if (FindLayouts(device_data, VkImage(image_handle), layouts)) {
3681 for (auto layout : layouts) {
3682 if (layout != VK_IMAGE_LAYOUT_PREINITIALIZED && layout != VK_IMAGE_LAYOUT_GENERAL) {
3683 skip |=
3684 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
3685 HandleToUint64(mem_info->mem), kVUID_Core_DrawState_InvalidImageLayout,
3686 "Mapping an image with layout %s can result in undefined behavior if this memory is used "
3687 "by the device. Only GENERAL or PREINITIALIZED should be used.",
3688 string_VkImageLayout(layout));
3689 }
3690 }
3691 }
3692 }
3693 }
3694 }
3695 return skip;
3696 }
3697
3698 // Helper function to validate correct usage bits set for buffers or images. Verify that (actual & desired) flags != 0 or, if strict
3699 // is true, verify that (actual & desired) flags == desired
3700 bool CoreChecks::ValidateUsageFlags(const layer_data *device_data, VkFlags actual, VkFlags desired, VkBool32 strict,
3701 uint64_t obj_handle, VulkanObjectType obj_type, const char *msgCode, char const *func_name,
3702 char const *usage_str) {
3703 bool correct_usage = false;
3704 bool skip = false;
3705 const char *type_str = object_string[obj_type];
3706 if (strict) {
3707 correct_usage = ((actual & desired) == desired);
3708 } else {
3709 correct_usage = ((actual & desired) != 0);
3710 }
3711 if (!correct_usage) {
3712 if (msgCode == kVUIDUndefined) {
3713 // TODO: Fix callers with kVUIDUndefined to use correct validation checks.
3714 skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle,
3715 kVUID_Core_MemTrack_InvalidUsageFlag,
3716 "Invalid usage flag for %s %s used by %s. In this case, %s should have %s set during creation.",
3717 type_str, report_data->FormatHandle(obj_handle).c_str(), func_name, type_str, usage_str);
3718 } else {
3719 skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle, msgCode,
3720 "Invalid usage flag for %s %s used by %s. In this case, %s should have %s set during creation.",
3721 type_str, report_data->FormatHandle(obj_handle).c_str(), func_name, type_str, usage_str);
3722 }
3723 }
3724 return skip;
3725 }
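// Illustrative sketch (not part of the layer's call graph, kept in a comment): how callers in this file use the
// helper above. With strict == true every requested bit must be present in the creation usage; with
// strict == false any overlap passes. The arguments mirror the ValidateImageUsageFlags wrapper below.
/*
    // Strict check: the image must have been created with TRANSFER_SRC usage
    ValidateUsageFlags(device_data, image_state->createInfo.usage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
                       HandleToUint64(image_state->image), kVulkanObjectTypeImage,
                       "VUID-vkCmdBlitImage-srcImage-00219", "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
*/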
3726
3727 // Helper function to validate usage flags for images. For given image_state send actual vs. desired usage off to helper above
3728 // where an error will be flagged if usage is not correct
3729 bool CoreChecks::ValidateImageUsageFlags(layer_data *device_data, IMAGE_STATE const *image_state, VkFlags desired, bool strict,
3730 const char *msgCode, char const *func_name, char const *usage_string) {
3731 return ValidateUsageFlags(device_data, image_state->createInfo.usage, desired, strict, HandleToUint64(image_state->image),
3732 kVulkanObjectTypeImage, msgCode, func_name, usage_string);
3733 }
3734
3735 bool CoreChecks::ValidateImageFormatFeatureFlags(layer_data *dev_data, IMAGE_STATE const *image_state, VkFormatFeatureFlags desired,
3736 char const *func_name, const char *linear_vuid, const char *optimal_vuid) {
3737 VkFormatProperties format_properties = GetPDFormatProperties(image_state->createInfo.format);
3738 bool skip = false;
3739 if (image_state->createInfo.tiling == VK_IMAGE_TILING_LINEAR) {
3740 if ((format_properties.linearTilingFeatures & desired) != desired) {
3741 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
3742 HandleToUint64(image_state->image), linear_vuid,
3743 "In %s, invalid linearTilingFeatures (0x%08X) for format %u used by image %s.", func_name,
3744 format_properties.linearTilingFeatures, image_state->createInfo.format,
3745 report_data->FormatHandle(image_state->image).c_str());
3746 }
3747 } else if (image_state->createInfo.tiling == VK_IMAGE_TILING_OPTIMAL) {
3748 if ((format_properties.optimalTilingFeatures & desired) != desired) {
3749 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
3750 HandleToUint64(image_state->image), optimal_vuid,
3751 "In %s, invalid optimalTilingFeatures (0x%08X) for format %u used by image %s.", func_name,
3752 format_properties.optimalTilingFeatures, image_state->createInfo.format,
3753 report_data->FormatHandle(image_state->image).c_str());
3754 }
3755 }
3756 return skip;
3757 }
3758
3759 bool CoreChecks::ValidateImageSubresourceLayers(layer_data *dev_data, const GLOBAL_CB_NODE *cb_node,
3760 const VkImageSubresourceLayers *subresource_layers, char const *func_name,
3761 char const *member, uint32_t i) {
3762 bool skip = false;
3763 // layerCount must not be zero
3764 if (subresource_layers->layerCount == 0) {
3765 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3766 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageSubresourceLayers-layerCount-01700",
3767 "In %s, pRegions[%u].%s.layerCount must not be zero.", func_name, i, member);
3768 }
3769 // aspectMask must not contain VK_IMAGE_ASPECT_METADATA_BIT
3770 if (subresource_layers->aspectMask & VK_IMAGE_ASPECT_METADATA_BIT) {
3771 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3772 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageSubresourceLayers-aspectMask-00168",
3773 "In %s, pRegions[%u].%s.aspectMask has VK_IMAGE_ASPECT_METADATA_BIT set.", func_name, i, member);
3774 }
3775 // if aspectMask contains COLOR, it must not contain either DEPTH or STENCIL
3776 if ((subresource_layers->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) &&
3777 (subresource_layers->aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
3778 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
3779 HandleToUint64(cb_node->commandBuffer), "VUID-VkImageSubresourceLayers-aspectMask-00167",
3780 "In %s, pRegions[%u].%s.aspectMask has VK_IMAGE_ASPECT_COLOR_BIT and either VK_IMAGE_ASPECT_DEPTH_BIT or "
3781 "VK_IMAGE_ASPECT_STENCIL_BIT set.",
3782 func_name, i, member);
3783 }
3784 return skip;
3785 }
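// Example of a VkImageSubresourceLayers value that satisfies the three checks above (field values are assumptions
// chosen for illustration):
//
//   VkImageSubresourceLayers layers = {};
//   layers.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;  // exactly one aspect; never METADATA, never COLOR mixed with DEPTH/STENCIL
//   layers.mipLevel       = 0;
//   layers.baseArrayLayer = 0;
//   layers.layerCount     = 1;                           // must not be zero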
3786
3787 // Helper function to validate usage flags for buffers. For the given buffer_state, send the actual vs. desired usage off to the
3788 // helper above, where an error will be flagged if the usage is not correct.
3789 bool CoreChecks::ValidateBufferUsageFlags(const layer_data *device_data, BUFFER_STATE const *buffer_state, VkFlags desired,
3790 bool strict, const char *msgCode, char const *func_name, char const *usage_string) {
3791 return ValidateUsageFlags(device_data, buffer_state->createInfo.usage, desired, strict, HandleToUint64(buffer_state->buffer),
3792 kVulkanObjectTypeBuffer, msgCode, func_name, usage_string);
3793 }
3794
3795 bool CoreChecks::ValidateBufferViewRange(const layer_data *device_data, const BUFFER_STATE *buffer_state,
3796 const VkBufferViewCreateInfo *pCreateInfo, const VkPhysicalDeviceLimits *device_limits) {
3797 bool skip = false;
3798
3799 const VkDeviceSize &range = pCreateInfo->range;
3800 if (range != VK_WHOLE_SIZE) {
3801 // Range must be greater than 0
3802 if (range == 0) {
3803 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3804 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00928",
3805 "If VkBufferViewCreateInfo range (%" PRIuLEAST64
3806 ") does not equal VK_WHOLE_SIZE, range must be greater than 0.",
3807 range);
3808 }
3809 // Range must be a multiple of the element size of format
3810 const size_t format_size = FormatElementSize(pCreateInfo->format);
3811 if (range % format_size != 0) {
3812 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3813 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00929",
3814 "If VkBufferViewCreateInfo range (%" PRIuLEAST64
3815 ") does not equal VK_WHOLE_SIZE, range must be a multiple of the element size of the format "
3816 "(" PRINTF_SIZE_T_SPECIFIER ").",
3817 range, format_size);
3818 }
3819 // Range divided by the element size of format must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements
3820 if (range / format_size > device_limits->maxTexelBufferElements) {
3821 skip |=
3822 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3823 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-range-00930",
3824 "If VkBufferViewCreateInfo range (%" PRIuLEAST64
3825 ") does not equal VK_WHOLE_SIZE, range divided by the element size of the format (" PRINTF_SIZE_T_SPECIFIER
3826 ") must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements (%" PRIuLEAST32 ").",
3827 range, format_size, device_limits->maxTexelBufferElements);
3828 }
3829 // The sum of range and offset must be less than or equal to the size of buffer
3830 if (range + pCreateInfo->offset > buffer_state->createInfo.size) {
3831 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3832 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-00931",
3833 "If VkBufferViewCreateInfo range (%" PRIuLEAST64
3834 ") does not equal VK_WHOLE_SIZE, the sum of offset (%" PRIuLEAST64
3835 ") and range must be less than or equal to the size of the buffer (%" PRIuLEAST64 ").",
3836 range, pCreateInfo->offset, buffer_state->createInfo.size);
3837 }
3838 }
3839 return skip;
3840 }
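// Worked example of the range rules above (all values assumed): for VK_FORMAT_R32G32B32A32_SFLOAT the element size
// is 16 bytes, so a view with range = 256 covers 256 / 16 = 16 texels. That passes when 256 % 16 == 0,
// 16 <= VkPhysicalDeviceLimits::maxTexelBufferElements, and offset + 256 <= the buffer's createInfo.size.
//
//   VkBufferViewCreateInfo view_ci = {VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO};
//   view_ci.buffer = buffer;                           // assumed to be at least 256 bytes
//   view_ci.format = VK_FORMAT_R32G32B32A32_SFLOAT;    // 16-byte texels
//   view_ci.offset = 0;
//   view_ci.range  = 256;                              // 16 texels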
3841
3842 bool CoreChecks::ValidateBufferViewBuffer(const layer_data *device_data, const BUFFER_STATE *buffer_state,
3843 const VkBufferViewCreateInfo *pCreateInfo) {
3844 bool skip = false;
3845 const VkFormatProperties format_properties = GetPDFormatProperties(pCreateInfo->format);
3846 if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) &&
3847 !(format_properties.bufferFeatures & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)) {
3848 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3849 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-00933",
3850 "If buffer was created with `usage` containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, format must "
3851 "be supported for uniform texel buffers");
3852 }
3853 if ((buffer_state->createInfo.usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) &&
3854 !(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
3855 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3856 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-buffer-00934",
3857 "If buffer was created with `usage` containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, format must "
3858 "be supported for storage texel buffers");
3859 }
3860 return skip;
3861 }
3862
3863 bool CoreChecks::PreCallValidateCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
3864 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
3865 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
3866
3867 bool skip = false;
3868
3869 // TODO: Add check for "VUID-vkCreateBuffer-flags-00911" (sparse address space accounting)
3870
3871 if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) && (!GetEnabledFeatures()->core.sparseBinding)) {
3872 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3873 "VUID-VkBufferCreateInfo-flags-00915",
3874 "vkCreateBuffer(): the sparseBinding device feature is disabled: Buffers cannot be created with the "
3875 "VK_BUFFER_CREATE_SPARSE_BINDING_BIT set.");
3876 }
3877
3878 if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) && (!GetEnabledFeatures()->core.sparseResidencyBuffer)) {
3879 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3880 "VUID-VkBufferCreateInfo-flags-00916",
3881 "vkCreateBuffer(): the sparseResidencyBuffer device feature is disabled: Buffers cannot be created with "
3882 "the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT set.");
3883 }
3884
3885 if ((pCreateInfo->flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) && (!GetEnabledFeatures()->core.sparseResidencyAliased)) {
3886 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3887 "VUID-VkBufferCreateInfo-flags-00917",
3888 "vkCreateBuffer(): the sparseResidencyAliased device feature is disabled: Buffers cannot be created with "
3889 "the VK_BUFFER_CREATE_SPARSE_ALIASED_BIT set.");
3890 }
3891
3892 auto chained_devaddr_struct = lvl_find_in_chain<VkBufferDeviceAddressCreateInfoEXT>(pCreateInfo->pNext);
3893 if (chained_devaddr_struct) {
3894 if (!(pCreateInfo->flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT) &&
3895 chained_devaddr_struct->deviceAddress != 0) {
3896 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3897 "VUID-VkBufferCreateInfo-deviceAddress-02604",
3898 "vkCreateBuffer(): Non-zero VkBufferDeviceAddressCreateInfoEXT::deviceAddress "
3899 "requires VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT.");
3900 }
3901 }
3902
3903 if ((pCreateInfo->flags & VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT) &&
3904 !GetEnabledFeatures()->buffer_address.bufferDeviceAddressCaptureReplay) {
3905 skip |= log_msg(
3906 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3907 "VUID-VkBufferCreateInfo-flags-02605",
3908 "vkCreateBuffer(): the bufferDeviceAddressCaptureReplay device feature is disabled: Buffers cannot be created with "
3909 "the VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT set.");
3910 }
3911
3912 if ((pCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) &&
3913 !GetEnabledFeatures()->buffer_address.bufferDeviceAddress) {
3914 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
3915 "VUID-VkBufferCreateInfo-usage-02606",
3916 "vkCreateBuffer(): the bufferDeviceAddress device feature is disabled: Buffers cannot be created with "
3917 "the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT set.");
3918 }
3919
3920 if (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT && pCreateInfo->pQueueFamilyIndices) {
3921 skip |=
3922 ValidateQueueFamilies(device_data, pCreateInfo->queueFamilyIndexCount, pCreateInfo->pQueueFamilyIndices,
3923 "vkCreateBuffer", "pCreateInfo->pQueueFamilyIndices", "VUID-VkBufferCreateInfo-sharingMode-01419",
3924 "VUID-VkBufferCreateInfo-sharingMode-01419", false);
3925 }
3926
3927 return skip;
3928 }
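// Application-side sketch of what the sparse-flag checks above enforce (illustrative; 'enabled_features' and the
// chosen size/usage are assumptions, not names used by this layer):
//
//   VkBufferCreateInfo buf_ci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
//   buf_ci.size        = 1 << 20;
//   buf_ci.usage       = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
//   buf_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
//   if (enabled_features.sparseBinding) {               // only set the flag when the feature was enabled
//       buf_ci.flags |= VK_BUFFER_CREATE_SPARSE_BINDING_BIT;
//   }
//   VkBuffer buffer = VK_NULL_HANDLE;
//   vkCreateBuffer(device, &buf_ci, nullptr, &buffer);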
3929
3930 void CoreChecks::PostCallRecordCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
3931 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer, VkResult result) {
3932 if (result != VK_SUCCESS) return;
3933 // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
3934 GetBufferMap()->insert(std::make_pair(*pBuffer, std::unique_ptr<BUFFER_STATE>(new BUFFER_STATE(*pBuffer, pCreateInfo))));
3935 }
3936
3937 bool CoreChecks::PreCallValidateCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
3938 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
3939 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
3940
3941 bool skip = false;
3942 BUFFER_STATE *buffer_state = GetBufferState(pCreateInfo->buffer);
3943 // If this isn't a sparse buffer, it needs to have memory backing it at CreateBufferView time
3944 if (buffer_state) {
3945 skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCreateBufferView()",
3946 "VUID-VkBufferViewCreateInfo-buffer-00935");
3947 // In order to create a valid buffer view, the buffer must have been created with at least one of the following flags:
3948 // UNIFORM_TEXEL_BUFFER_BIT or STORAGE_TEXEL_BUFFER_BIT
3949 skip |= ValidateBufferUsageFlags(device_data, buffer_state,
3950 VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, false,
3951 "VUID-VkBufferViewCreateInfo-buffer-00932", "vkCreateBufferView()",
3952 "VK_BUFFER_USAGE_[STORAGE|UNIFORM]_TEXEL_BUFFER_BIT");
3953
3954 // Buffer view offset must be less than the size of buffer
3955 if (pCreateInfo->offset >= buffer_state->createInfo.size) {
3956 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3957 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-00925",
3958 "VkBufferViewCreateInfo offset (%" PRIuLEAST64
3959 ") must be less than the size of the buffer (%" PRIuLEAST64 ").",
3960 pCreateInfo->offset, buffer_state->createInfo.size);
3961 }
3962
3963 const VkPhysicalDeviceLimits *device_limits = &(GetPDProperties()->limits);
3964 // Buffer view offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment
3965 if ((pCreateInfo->offset % device_limits->minTexelBufferOffsetAlignment) != 0) {
3966 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3967 HandleToUint64(buffer_state->buffer), "VUID-VkBufferViewCreateInfo-offset-00926",
3968 "VkBufferViewCreateInfo offset (%" PRIuLEAST64
3969 ") must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment (%" PRIuLEAST64 ").",
3970 pCreateInfo->offset, device_limits->minTexelBufferOffsetAlignment);
3971 }
3972
3973 skip |= ValidateBufferViewRange(device_data, buffer_state, pCreateInfo, device_limits);
3974
3975 skip |= ValidateBufferViewBuffer(device_data, buffer_state, pCreateInfo);
3976 }
3977 return skip;
3978 }
3979
3980 void CoreChecks::PostCallRecordCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
3981 const VkAllocationCallbacks *pAllocator, VkBufferView *pView, VkResult result) {
3982 if (result != VK_SUCCESS) return;
3983 (*GetBufferViewMap())[*pView] = std::unique_ptr<BUFFER_VIEW_STATE>(new BUFFER_VIEW_STATE(*pView, pCreateInfo));
3984 }
3985
3986 // For the given format verify that the aspect masks make sense
3987 bool CoreChecks::ValidateImageAspectMask(const layer_data *device_data, VkImage image, VkFormat format,
3988 VkImageAspectFlags aspect_mask, const char *func_name, const char *vuid) {
3989 bool skip = false;
3990 VkDebugReportObjectTypeEXT objectType = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT;
3991 if (image != VK_NULL_HANDLE) {
3992 objectType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
3993 }
3994
3995 if (FormatIsColor(format)) {
3996 if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
3997 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
3998 "%s: Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set.", func_name);
3999 } else if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != aspect_mask) {
4000 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4001 "%s: Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set.", func_name);
4002 }
4003 } else if (FormatIsDepthAndStencil(format)) {
4004 if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0) {
4005 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4006 "%s: Depth/stencil image formats must have at least one of VK_IMAGE_ASPECT_DEPTH_BIT and "
4007 "VK_IMAGE_ASPECT_STENCIL_BIT set.",
4008 func_name);
4009 } else if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != aspect_mask) {
4010 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4011 "%s: Combination depth/stencil image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT and "
4012 "VK_IMAGE_ASPECT_STENCIL_BIT set.",
4013 func_name);
4014 }
4015 } else if (FormatIsDepthOnly(format)) {
4016 if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) {
4017 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4018 "%s: Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set.", func_name);
4019 } else if ((aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) != aspect_mask) {
4020 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4021 "%s: Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set.", func_name);
4022 }
4023 } else if (FormatIsStencilOnly(format)) {
4024 if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT) {
4025 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4026 "%s: Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set.", func_name);
4027 } else if ((aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) != aspect_mask) {
4028 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4029 "%s: Stencil-only image formats can have only the VK_IMAGE_ASPECT_STENCIL_BIT set.", func_name);
4030 }
4031 } else if (FormatIsMultiplane(format)) {
4032 VkImageAspectFlags valid_flags = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT;
4033 if (3 == FormatPlaneCount(format)) {
4034 valid_flags = valid_flags | VK_IMAGE_ASPECT_PLANE_2_BIT;
4035 }
4036 if ((aspect_mask & valid_flags) != aspect_mask) {
4037 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, objectType, HandleToUint64(image), vuid,
4038 "%s: Multi-plane image formats may have only VK_IMAGE_ASPECT_COLOR_BIT or VK_IMAGE_ASPECT_PLANE_n_BITs "
4039 "set, where n = [0, 1, 2].",
4040 func_name);
4041 }
4042 }
4043 return skip;
4044 }
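// Quick reference for the aspect rules above (example formats only):
//   VK_FORMAT_R8G8B8A8_UNORM              -> exactly VK_IMAGE_ASPECT_COLOR_BIT
//   VK_FORMAT_D24_UNORM_S8_UINT           -> DEPTH_BIT, STENCIL_BIT, or both, and nothing else
//   VK_FORMAT_D32_SFLOAT                  -> exactly DEPTH_BIT
//   VK_FORMAT_S8_UINT                     -> exactly STENCIL_BIT
//   VK_FORMAT_G8_B8R8_2PLANE_420_UNORM    -> any subset of COLOR_BIT, PLANE_0_BIT, PLANE_1_BIT (PLANE_2_BIT only for 3-plane formats)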
4045
4046 bool CoreChecks::ValidateImageSubresourceRange(const layer_data *device_data, const uint32_t image_mip_count,
4047 const uint32_t image_layer_count, const VkImageSubresourceRange &subresourceRange,
4048 const char *cmd_name, const char *param_name, const char *image_layer_count_var_name,
4049 const uint64_t image_handle, SubresourceRangeErrorCodes errorCodes) {
4050 bool skip = false;
4051
4052 // Validate mip levels
4053 if (subresourceRange.baseMipLevel >= image_mip_count) {
4054 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
4055 errorCodes.base_mip_err,
4056 "%s: %s.baseMipLevel (= %" PRIu32
4057 ") is greater or equal to the mip level count of the image (i.e. greater or equal to %" PRIu32 ").",
4058 cmd_name, param_name, subresourceRange.baseMipLevel, image_mip_count);
4059 }
4060
4061 if (subresourceRange.levelCount != VK_REMAINING_MIP_LEVELS) {
4062 if (subresourceRange.levelCount == 0) {
4063 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
4064 errorCodes.mip_count_err, "%s: %s.levelCount is 0.", cmd_name, param_name);
4065 } else {
4066 const uint64_t necessary_mip_count = uint64_t{subresourceRange.baseMipLevel} + uint64_t{subresourceRange.levelCount};
4067
4068 if (necessary_mip_count > image_mip_count) {
4069 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
4070 errorCodes.mip_count_err,
4071 "%s: %s.baseMipLevel + .levelCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64
4072 ") is greater than the mip level count of the image (i.e. greater than %" PRIu32 ").",
4073 cmd_name, param_name, subresourceRange.baseMipLevel, subresourceRange.levelCount,
4074 necessary_mip_count, image_mip_count);
4075 }
4076 }
4077 }
4078
4079 // Validate array layers
4080 if (subresourceRange.baseArrayLayer >= image_layer_count) {
4081 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
4082 errorCodes.base_layer_err,
4083 "%s: %s.baseArrayLayer (= %" PRIu32
4084 ") is greater or equal to the %s of the image when it was created (i.e. greater or equal to %" PRIu32 ").",
4085 cmd_name, param_name, subresourceRange.baseArrayLayer, image_layer_count_var_name, image_layer_count);
4086 }
4087
4088 if (subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS) {
4089 if (subresourceRange.layerCount == 0) {
4090 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
4091 errorCodes.layer_count_err, "%s: %s.layerCount is 0.", cmd_name, param_name);
4092 } else {
4093 const uint64_t necessary_layer_count =
4094 uint64_t{subresourceRange.baseArrayLayer} + uint64_t{subresourceRange.layerCount};
4095
4096 if (necessary_layer_count > image_layer_count) {
4097 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, image_handle,
4098 errorCodes.layer_count_err,
4099 "%s: %s.baseArrayLayer + .layerCount (= %" PRIu32 " + %" PRIu32 " = %" PRIu64
4100 ") is greater than the %s of the image when it was created (i.e. greater than %" PRIu32 ").",
4101 cmd_name, param_name, subresourceRange.baseArrayLayer, subresourceRange.layerCount,
4102 necessary_layer_count, image_layer_count_var_name, image_layer_count);
4103 }
4104 }
4105 }
4106
4107 return skip;
4108 }
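// Worked example for the mip checks above (values assumed): an image created with mipLevels = 10 rejects a range
// with baseMipLevel = 8 and levelCount = 4, since 8 + 4 = 12 > 10. The sum is formed in uint64_t so that
// baseMipLevel + levelCount cannot wrap around even for values near UINT32_MAX; the layer check above follows the
// same pattern.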
4109
4110 bool CoreChecks::ValidateCreateImageViewSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
4111 bool is_imageview_2d_type,
4112 const VkImageSubresourceRange &subresourceRange) {
4113 bool is_khr_maintenance1 = GetDeviceExtensions()->vk_khr_maintenance1;
4114 bool is_image_slicable = image_state->createInfo.imageType == VK_IMAGE_TYPE_3D &&
4115 (image_state->createInfo.flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR);
4116 bool is_3D_to_2D_map = is_khr_maintenance1 && is_image_slicable && is_imageview_2d_type;
4117
4118 const auto image_layer_count = is_3D_to_2D_map ? image_state->createInfo.extent.depth : image_state->createInfo.arrayLayers;
4119 const auto image_layer_count_var_name = is_3D_to_2D_map ? "extent.depth" : "arrayLayers";
4120
4121 SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
4122 subresourceRangeErrorCodes.base_mip_err = "VUID-VkImageViewCreateInfo-subresourceRange-01478";
4123 subresourceRangeErrorCodes.mip_count_err = "VUID-VkImageViewCreateInfo-subresourceRange-01718";
4124 subresourceRangeErrorCodes.base_layer_err = is_khr_maintenance1 ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-image-01484"
4125 : "VUID-VkImageViewCreateInfo-image-01482")
4126 : "VUID-VkImageViewCreateInfo-subresourceRange-01480";
4127 subresourceRangeErrorCodes.layer_count_err = is_khr_maintenance1
4128 ? (is_3D_to_2D_map ? "VUID-VkImageViewCreateInfo-subresourceRange-01485"
4129 : "VUID-VkImageViewCreateInfo-subresourceRange-01483")
4130 : "VUID-VkImageViewCreateInfo-subresourceRange-01719";
4131
4132 return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_layer_count, subresourceRange,
4133 "vkCreateImageView", "pCreateInfo->subresourceRange", image_layer_count_var_name,
4134 HandleToUint64(image_state->image), subresourceRangeErrorCodes);
4135 }
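// Example of the 3D-to-2D case handled above (values assumed): a VK_IMAGE_TYPE_3D image created with
// extent.depth = 64, arrayLayers = 1, and VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT can back a
// VK_IMAGE_VIEW_TYPE_2D_ARRAY view when VK_KHR_maintenance1 is enabled; its subresourceRange layers are then
// validated against extent.depth (64) rather than arrayLayers (1), using the 01484/01485 VUIDs selected above.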
4136
4137 bool CoreChecks::ValidateCmdClearColorSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
4138 const VkImageSubresourceRange &subresourceRange, const char *param_name) {
4139 SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
4140 subresourceRangeErrorCodes.base_mip_err = "VUID-vkCmdClearColorImage-baseMipLevel-01470";
4141 subresourceRangeErrorCodes.mip_count_err = "VUID-vkCmdClearColorImage-pRanges-01692";
4142 subresourceRangeErrorCodes.base_layer_err = "VUID-vkCmdClearColorImage-baseArrayLayer-01472";
4143 subresourceRangeErrorCodes.layer_count_err = "VUID-vkCmdClearColorImage-pRanges-01693";
4144
4145 return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers,
4146 subresourceRange, "vkCmdClearColorImage", param_name, "arrayLayers",
4147 HandleToUint64(image_state->image), subresourceRangeErrorCodes);
4148 }
4149
4150 bool CoreChecks::ValidateCmdClearDepthSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
4151 const VkImageSubresourceRange &subresourceRange, const char *param_name) {
4152 SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
4153 subresourceRangeErrorCodes.base_mip_err = "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474";
4154 subresourceRangeErrorCodes.mip_count_err = "VUID-vkCmdClearDepthStencilImage-pRanges-01694";
4155 subresourceRangeErrorCodes.base_layer_err = "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476";
4156 subresourceRangeErrorCodes.layer_count_err = "VUID-vkCmdClearDepthStencilImage-pRanges-01695";
4157
4158 return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers,
4159 subresourceRange, "vkCmdClearDepthStencilImage", param_name, "arrayLayers",
4160 HandleToUint64(image_state->image), subresourceRangeErrorCodes);
4161 }
4162
4163 bool CoreChecks::ValidateImageBarrierSubresourceRange(const layer_data *device_data, const IMAGE_STATE *image_state,
4164 const VkImageSubresourceRange &subresourceRange, const char *cmd_name,
4165 const char *param_name) {
4166 SubresourceRangeErrorCodes subresourceRangeErrorCodes = {};
4167 subresourceRangeErrorCodes.base_mip_err = "VUID-VkImageMemoryBarrier-subresourceRange-01486";
4168 subresourceRangeErrorCodes.mip_count_err = "VUID-VkImageMemoryBarrier-subresourceRange-01724";
4169 subresourceRangeErrorCodes.base_layer_err = "VUID-VkImageMemoryBarrier-subresourceRange-01488";
4170 subresourceRangeErrorCodes.layer_count_err = "VUID-VkImageMemoryBarrier-subresourceRange-01725";
4171
4172 return ValidateImageSubresourceRange(device_data, image_state->createInfo.mipLevels, image_state->createInfo.arrayLayers,
4173 subresourceRange, cmd_name, param_name, "arrayLayers", HandleToUint64(image_state->image),
4174 subresourceRangeErrorCodes);
4175 }
4176
4177 bool CoreChecks::PreCallValidateCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
4178 const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
4179 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
4180 bool skip = false;
4181 IMAGE_STATE *image_state = GetImageState(pCreateInfo->image);
4182 if (image_state) {
4183 skip |= ValidateImageUsageFlags(
4184 device_data, image_state,
4185 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
4186 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
4187 VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
4188 false, kVUIDUndefined, "vkCreateImageView()",
4189 "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT|DEPTH_STENCIL_ATTACHMENT|INPUT_ATTACHMENT|SHADING_RATE_IMAGE]_BIT");
4190 // If this isn't a sparse image, it needs to have memory backing it at CreateImageView time
4191 skip |=
4192 ValidateMemoryIsBoundToImage(device_data, image_state, "vkCreateImageView()", "VUID-VkImageViewCreateInfo-image-01020");
4193 // Checks imported from image layer
4194 skip |= ValidateCreateImageViewSubresourceRange(
4195 device_data, image_state,
4196 pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D || pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY,
4197 pCreateInfo->subresourceRange);
4198
4199 VkImageCreateFlags image_flags = image_state->createInfo.flags;
4200 VkFormat image_format = image_state->createInfo.format;
4201 VkImageUsageFlags image_usage = image_state->createInfo.usage;
4202 VkImageTiling image_tiling = image_state->createInfo.tiling;
4203 VkFormat view_format = pCreateInfo->format;
4204 VkImageAspectFlags aspect_mask = pCreateInfo->subresourceRange.aspectMask;
4205 VkImageType image_type = image_state->createInfo.imageType;
4206 VkImageViewType view_type = pCreateInfo->viewType;
4207
4208 // If there's a chained VkImageViewUsageCreateInfo struct, modify image_usage to match
4209 auto chained_ivuci_struct = lvl_find_in_chain<VkImageViewUsageCreateInfoKHR>(pCreateInfo->pNext);
4210 if (chained_ivuci_struct) {
4211 if (chained_ivuci_struct->usage & ~image_usage) {
4212 std::stringstream ss;
4213 ss << "vkCreateImageView(): Chained VkImageViewUsageCreateInfo usage field (0x" << std::hex
4214 << chained_ivuci_struct->usage << ") must not include flags not present in underlying image's usage (0x"
4215 << image_usage << ").";
4216 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4217 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewUsageCreateInfo-usage-01587", "%s",
4218 ss.str().c_str());
4219 }
4220
4221 image_usage = chained_ivuci_struct->usage;
4222 }
4223
4224 // Validate VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT state, if view/image formats differ
4225 if ((image_flags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) && (image_format != view_format)) {
4226 if (FormatIsMultiplane(image_format)) {
4227 // View format must match the multiplane compatible format
4228 uint32_t plane = 3; // invalid
4229 switch (aspect_mask) {
4230 case VK_IMAGE_ASPECT_PLANE_0_BIT:
4231 plane = 0;
4232 break;
4233 case VK_IMAGE_ASPECT_PLANE_1_BIT:
4234 plane = 1;
4235 break;
4236 case VK_IMAGE_ASPECT_PLANE_2_BIT:
4237 plane = 2;
4238 break;
4239 default:
4240 break;
4241 }
4242
4243 VkFormat compat_format = FindMultiplaneCompatibleFormat(image_format, plane);
4244 if (view_format != compat_format) {
4245 std::stringstream ss;
4246 ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format)
4247 << " is not compatible with plane " << plane << " of underlying image format "
4248 << string_VkFormat(image_format) << ", must be " << string_VkFormat(compat_format) << ".";
4249 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4250 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01586", "%s",
4251 ss.str().c_str());
4252 }
4253 } else {
4254 if ((!GetDeviceExtensions()->vk_khr_maintenance2 ||
4255 !(image_flags & VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR))) {
4256 // Format MUST be compatible (in the same format compatibility class) as the format the image was created with
4257 if (FormatCompatibilityClass(image_format) != FormatCompatibilityClass(view_format)) {
4258 std::stringstream ss;
4259 ss << "vkCreateImageView(): ImageView format " << string_VkFormat(view_format)
4260 << " is not in the same format compatibility class as image ("
4261 << report_data->FormatHandle(pCreateInfo->image).c_str() << ") format " << string_VkFormat(image_format)
4262 << ". Images created with the VK_IMAGE_CREATE_MUTABLE_FORMAT BIT "
4263 << "can support ImageViews with differing formats but they must be in the same compatibility class.";
4264 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4265 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01018", "%s",
4266 ss.str().c_str());
4267 }
4268 }
4269 }
4270 } else {
4271 // Format MUST be IDENTICAL to the format the image was created with
4272 if (image_format != view_format) {
4273 std::stringstream ss;
4274 ss << "vkCreateImageView() format " << string_VkFormat(view_format) << " differs from image "
4275 << report_data->FormatHandle(pCreateInfo->image).c_str() << " format " << string_VkFormat(image_format)
4276 << ". Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.";
4277 skip |=
4278 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4279 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01019", "%s", ss.str().c_str());
4280 }
4281 }
4282
4283 // Validate correct image aspect bits for desired formats and format consistency
4284 skip |= ValidateImageAspectMask(device_data, image_state->image, image_format, aspect_mask, "vkCreateImageView()");
4285
4286 switch (image_type) {
4287 case VK_IMAGE_TYPE_1D:
4288 if (view_type != VK_IMAGE_VIEW_TYPE_1D && view_type != VK_IMAGE_VIEW_TYPE_1D_ARRAY) {
4289 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4290 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
4291 "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
4292 string_VkImageViewType(view_type), string_VkImageType(image_type));
4293 }
4294 break;
4295 case VK_IMAGE_TYPE_2D:
4296 if (view_type != VK_IMAGE_VIEW_TYPE_2D && view_type != VK_IMAGE_VIEW_TYPE_2D_ARRAY) {
4297 if ((view_type == VK_IMAGE_VIEW_TYPE_CUBE || view_type == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) &&
4298 !(image_flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)) {
4299 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4300 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01003",
4301 "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
4302 string_VkImageViewType(view_type), string_VkImageType(image_type));
4303 } else if (view_type != VK_IMAGE_VIEW_TYPE_CUBE && view_type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) {
4304 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4305 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
4306 "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
4307 string_VkImageViewType(view_type), string_VkImageType(image_type));
4308 }
4309 }
4310 break;
4311 case VK_IMAGE_TYPE_3D:
4312 if (GetDeviceExtensions()->vk_khr_maintenance1) {
4313 if (view_type != VK_IMAGE_VIEW_TYPE_3D) {
4314 if ((view_type == VK_IMAGE_VIEW_TYPE_2D || view_type == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
4315 if (!(image_flags & VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR)) {
4316 skip |=
4317 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4318 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-01005",
4319 "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
4320 string_VkImageViewType(view_type), string_VkImageType(image_type));
4321 } else if ((image_flags & (VK_IMAGE_CREATE_SPARSE_BINDING_BIT | VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT |
4322 VK_IMAGE_CREATE_SPARSE_ALIASED_BIT))) {
4323 skip |=
4324 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4325 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
4326 "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s "
4327 "when the VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or "
4328 "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT flags are enabled.",
4329 string_VkImageViewType(view_type), string_VkImageType(image_type));
4330 }
4331 } else {
4332 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4333 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
4334 "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
4335 string_VkImageViewType(view_type), string_VkImageType(image_type));
4336 }
4337 }
4338 } else {
4339 if (view_type != VK_IMAGE_VIEW_TYPE_3D) {
4340 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4341 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-subResourceRange-01021",
4342 "vkCreateImageView(): pCreateInfo->viewType %s is not compatible with image type %s.",
4343 string_VkImageViewType(view_type), string_VkImageType(image_type));
4344 }
4345 }
4346 break;
4347 default:
4348 break;
4349 }
4350
4351 // External format checks needed when VK_ANDROID_external_memory_android_hardware_buffer enabled
4352 if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
4353 skip |= ValidateCreateImageViewANDROID(device_data, pCreateInfo);
4354 }
4355
4356 VkFormatProperties format_properties = GetPDFormatProperties(view_format);
4357 VkFormatFeatureFlags tiling_features = (image_tiling & VK_IMAGE_TILING_LINEAR) ? format_properties.linearTilingFeatures
4358 : format_properties.optimalTilingFeatures;
4359
4360 if (tiling_features == 0) {
4361 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4362 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-None-02273",
4363 "vkCreateImageView(): pCreateInfo->format %s with tiling %s has no supported format features on this "
4364 "physical device.",
4365 string_VkFormat(view_format), string_VkImageTiling(image_tiling));
4366 } else if ((image_usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(tiling_features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)) {
4367 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4368 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02274",
4369 "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
4370 "VK_IMAGE_USAGE_SAMPLED_BIT.",
4371 string_VkFormat(view_format), string_VkImageTiling(image_tiling));
4372 } else if ((image_usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(tiling_features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)) {
4373 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4374 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02275",
4375 "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
4376 "VK_IMAGE_USAGE_STORAGE_BIT.",
4377 string_VkFormat(view_format), string_VkImageTiling(image_tiling));
4378 } else if ((image_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) &&
4379 !(tiling_features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)) {
4380 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4381 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02276",
4382 "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
4383 "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT.",
4384 string_VkFormat(view_format), string_VkImageTiling(image_tiling));
4385 } else if ((image_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) &&
4386 !(tiling_features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
4387 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4388 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-usage-02277",
4389 "vkCreateImageView(): pCreateInfo->format %s with tiling %s does not support usage that includes "
4390 "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT.",
4391 string_VkFormat(view_format), string_VkImageTiling(image_tiling));
4392 }
4393
4394 if (image_usage & VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV) {
4395 if (view_type != VK_IMAGE_VIEW_TYPE_2D && view_type != VK_IMAGE_VIEW_TYPE_2D_ARRAY) {
4396 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4397 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-02086",
4398 "vkCreateImageView() If image was created with usage containing "
4399 "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, viewType must be "
4400 "VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY.");
4401 }
4402 if (view_format != VK_FORMAT_R8_UINT) {
4403 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4404 HandleToUint64(pCreateInfo->image), "VUID-VkImageViewCreateInfo-image-02087",
4405 "vkCreateImageView() If image was created with usage containing "
4406 "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, format must be VK_FORMAT_R8_UINT.");
4407 }
4408 }
4409 }
4410 return skip;
4411 }
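// Application-side sketch of the chained VkImageViewUsageCreateInfo path validated above (illustrative; the
// handles and the chosen usage/format are assumptions):
//
//   VkImageViewUsageCreateInfoKHR usage_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR};
//   usage_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT;                 // must be a subset of the image's usage
//   VkImageViewCreateInfo view_ci = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
//   view_ci.pNext            = &usage_info;
//   view_ci.image            = image;                              // assumed VK_IMAGE_TYPE_2D, R8G8B8A8_UNORM
//   view_ci.viewType         = VK_IMAGE_VIEW_TYPE_2D;
//   view_ci.format           = VK_FORMAT_R8G8B8A8_UNORM;
//   view_ci.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
//   VkImageView view = VK_NULL_HANDLE;
//   vkCreateImageView(device, &view_ci, nullptr, &view);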
4412
4413 void CoreChecks::PostCallRecordCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
4414 const VkAllocationCallbacks *pAllocator, VkImageView *pView, VkResult result) {
4415 if (result != VK_SUCCESS) return;
4416 auto image_view_map = GetImageViewMap();
4417 (*image_view_map)[*pView] = std::unique_ptr<IMAGE_VIEW_STATE>(new IMAGE_VIEW_STATE(*pView, pCreateInfo));
4418
4419 auto image_state = GetImageState(pCreateInfo->image);
4420 auto &sub_res_range = (*image_view_map)[*pView].get()->create_info.subresourceRange;
4421 sub_res_range.levelCount = ResolveRemainingLevels(&sub_res_range, image_state->createInfo.mipLevels);
4422 sub_res_range.layerCount = ResolveRemainingLayers(&sub_res_range, image_state->createInfo.arrayLayers);
4423 }
4424
4425 bool CoreChecks::PreCallValidateCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
4426 uint32_t regionCount, const VkBufferCopy *pRegions) {
4427 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
4428 auto cb_node = GetCBNode(commandBuffer);
4429 auto src_buffer_state = GetBufferState(srcBuffer);
4430 auto dst_buffer_state = GetBufferState(dstBuffer);
4431
4432 bool skip = false;
4433 skip |=
4434 ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-srcBuffer-00119");
4435 skip |=
4436 ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-dstBuffer-00121");
4437 // Validate that SRC & DST buffers have correct usage flags set
4438 skip |=
4439 ValidateBufferUsageFlags(device_data, src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true,
4440 "VUID-vkCmdCopyBuffer-srcBuffer-00118", "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
4441 skip |=
4442 ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
4443 "VUID-vkCmdCopyBuffer-dstBuffer-00120", "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
4444 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdCopyBuffer()",
4445 VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
4446 "VUID-vkCmdCopyBuffer-commandBuffer-cmdpool");
4447 skip |= ValidateCmd(device_data, cb_node, CMD_COPYBUFFER, "vkCmdCopyBuffer()");
4448 skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyBuffer()", "VUID-vkCmdCopyBuffer-renderpass");
4449 return skip;
4450 }
4451
4452 void CoreChecks::PreCallRecordCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
4453 uint32_t regionCount, const VkBufferCopy *pRegions) {
4454 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
4455 auto cb_node = GetCBNode(commandBuffer);
4456 auto src_buffer_state = GetBufferState(srcBuffer);
4457 auto dst_buffer_state = GetBufferState(dstBuffer);
4458
4459 // Update bindings between buffers and cmd buffer
4460 AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state);
4461 AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state);
4462 }
4463
4464 bool CoreChecks::ValidateIdleBuffer(layer_data *device_data, VkBuffer buffer) {
4465 bool skip = false;
4466 auto buffer_state = GetBufferState(buffer);
4467 if (!buffer_state) {
4468 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, HandleToUint64(buffer),
4469 kVUID_Core_DrawState_DoubleDestroy, "Cannot free buffer %s that has not been allocated.",
4470 report_data->FormatHandle(buffer).c_str());
4471 } else {
4472 if (buffer_state->in_use.load()) {
4473 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
4474 HandleToUint64(buffer), "VUID-vkDestroyBuffer-buffer-00922",
4475 "Cannot free buffer %s that is in use by a command buffer.", report_data->FormatHandle(buffer).c_str());
4476 }
4477 }
4478 return skip;
4479 }
4480
4481 bool CoreChecks::PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
4482 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
4483 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
4484 VK_OBJECT obj_struct = {HandleToUint64(imageView), kVulkanObjectTypeImageView};
4485
4486 bool skip = false;
4487 if (image_view_state) {
4488 skip |= ValidateObjectNotInUse(device_data, image_view_state, obj_struct, "vkDestroyImageView",
4489 "VUID-vkDestroyImageView-imageView-01026");
4490 }
4491 return skip;
4492 }
4493
4494 void CoreChecks::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
4495 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
4496 IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
4497 if (!image_view_state) return;
4498 VK_OBJECT obj_struct = {HandleToUint64(imageView), kVulkanObjectTypeImageView};
4499
4500 // Any bound cmd buffers are now invalid
4501 InvalidateCommandBuffers(device_data, image_view_state->cb_bindings, obj_struct);
4502 (*GetImageViewMap()).erase(imageView);
4503 }
4504
4505 bool CoreChecks::PreCallValidateDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
4506 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
4507 auto buffer_state = GetBufferState(buffer);
4508
4509 bool skip = false;
4510 if (buffer_state) {
4511 skip |= ValidateIdleBuffer(device_data, buffer);
4512 }
4513 return skip;
4514 }
4515
4516 void CoreChecks::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
4517 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
4518 if (!buffer) return;
4519 auto buffer_state = GetBufferState(buffer);
4520 VK_OBJECT obj_struct = {HandleToUint64(buffer), kVulkanObjectTypeBuffer};
4521
4522 InvalidateCommandBuffers(device_data, buffer_state->cb_bindings, obj_struct);
4523 for (auto mem_binding : buffer_state->GetBoundMemory()) {
4524 auto mem_info = GetMemObjInfo(mem_binding);
4525 if (mem_info) {
4526 RemoveBufferMemoryRange(HandleToUint64(buffer), mem_info);
4527 }
4528 }
4529 ClearMemoryObjectBindings(HandleToUint64(buffer), kVulkanObjectTypeBuffer);
4530 EraseQFOReleaseBarriers<VkBufferMemoryBarrier>(device_data, buffer);
4531 GetBufferMap()->erase(buffer_state->buffer);
4532 }
4533
4534 bool CoreChecks::PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView,
4535 const VkAllocationCallbacks *pAllocator) {
4536 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
4537 auto buffer_view_state = GetBufferViewState(bufferView);
4538 VK_OBJECT obj_struct = {HandleToUint64(bufferView), kVulkanObjectTypeBufferView};
4539 bool skip = false;
4540 if (buffer_view_state) {
4541 skip |= ValidateObjectNotInUse(device_data, buffer_view_state, obj_struct, "vkDestroyBufferView",
4542 "VUID-vkDestroyBufferView-bufferView-00936");
4543 }
4544 return skip;
4545 }
4546
4547 void CoreChecks::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
4548 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
4549 if (!bufferView) return;
4550 auto buffer_view_state = GetBufferViewState(bufferView);
4551 VK_OBJECT obj_struct = {HandleToUint64(bufferView), kVulkanObjectTypeBufferView};
4552
4553 // Any bound cmd buffers are now invalid
4554 InvalidateCommandBuffers(device_data, buffer_view_state->cb_bindings, obj_struct);
4555 GetBufferViewMap()->erase(bufferView);
4556 }
4557
4558 bool CoreChecks::PreCallValidateCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
4559 VkDeviceSize size, uint32_t data) {
4560 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
4561 auto cb_node = GetCBNode(commandBuffer);
4562 auto buffer_state = GetBufferState(dstBuffer);
4563 bool skip = false;
4564 skip |= ValidateMemoryIsBoundToBuffer(device_data, buffer_state, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-dstBuffer-00031");
4565 skip |= ValidateCmdQueueFlags(device_data, cb_node, "vkCmdFillBuffer()",
4566 VK_QUEUE_TRANSFER_BIT | VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT,
4567 "VUID-vkCmdFillBuffer-commandBuffer-cmdpool");
4568 skip |= ValidateCmd(device_data, cb_node, CMD_FILLBUFFER, "vkCmdFillBuffer()");
4569 // Validate that DST buffer has correct usage flags set
4570 skip |=
4571 ValidateBufferUsageFlags(device_data, buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
4572 "VUID-vkCmdFillBuffer-dstBuffer-00029", "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
4573 skip |= InsideRenderPass(device_data, cb_node, "vkCmdFillBuffer()", "VUID-vkCmdFillBuffer-renderpass");
4574 return skip;
4575 }
4576
4577 void CoreChecks::PreCallRecordCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
4578 VkDeviceSize size, uint32_t data) {
4579 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
4580 auto cb_node = GetCBNode(commandBuffer);
4581 auto buffer_state = GetBufferState(dstBuffer);
4582 // Update bindings between buffer and cmd buffer
4583 AddCommandBufferBindingBuffer(device_data, cb_node, buffer_state);
4584 }
4585
4586 bool CoreChecks::ValidateBufferImageCopyData(const debug_report_data *report_data, uint32_t regionCount,
4587 const VkBufferImageCopy *pRegions, IMAGE_STATE *image_state, const char *function) {
4588 bool skip = false;
4589
4590 for (uint32_t i = 0; i < regionCount; i++) {
4591 if (image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
4592 if ((pRegions[i].imageOffset.y != 0) || (pRegions[i].imageExtent.height != 1)) {
4593 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4594 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-srcImage-00199",
4595 "%s(): pRegion[%d] imageOffset.y is %d and imageExtent.height is %d. For 1D images these must be 0 "
4596 "and 1, respectively.",
4597 function, i, pRegions[i].imageOffset.y, pRegions[i].imageExtent.height);
4598 }
4599 }
4600
4601 if ((image_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (image_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) {
4602 if ((pRegions[i].imageOffset.z != 0) || (pRegions[i].imageExtent.depth != 1)) {
4603 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4604 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-srcImage-00201",
4605 "%s(): pRegion[%d] imageOffset.z is %d and imageExtent.depth is %d. For 1D and 2D images these "
4606 "must be 0 and 1, respectively.",
4607 function, i, pRegions[i].imageOffset.z, pRegions[i].imageExtent.depth);
4608 }
4609 }
4610
4611 if (image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
4612 if ((0 != pRegions[i].imageSubresource.baseArrayLayer) || (1 != pRegions[i].imageSubresource.layerCount)) {
4613 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4614 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-baseArrayLayer-00213",
4615 "%s(): pRegion[%d] imageSubresource.baseArrayLayer is %d and imageSubresource.layerCount is %d. "
4616 "For 3D images these must be 0 and 1, respectively.",
4617 function, i, pRegions[i].imageSubresource.baseArrayLayer, pRegions[i].imageSubresource.layerCount);
4618 }
4619 }
4620
4621 // If the calling command's VkImage parameter's format is not a depth/stencil format,
4622 // then bufferOffset must be a multiple of the calling command's VkImage parameter's element size
4623 uint32_t element_size = FormatElementSize(image_state->createInfo.format);
4624 if (!FormatIsDepthAndStencil(image_state->createInfo.format) && SafeModulo(pRegions[i].bufferOffset, element_size) != 0) {
4625 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4626 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferOffset-00193",
4627 "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64
4628 " must be a multiple of this format's texel size (%" PRIu32 ").",
4629 function, i, pRegions[i].bufferOffset, element_size);
4630 }
4631
4632 // BufferOffset must be a multiple of 4
4633 if (SafeModulo(pRegions[i].bufferOffset, 4) != 0) {
4634 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4635 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferOffset-00194",
4636 "%s(): pRegion[%d] bufferOffset 0x%" PRIxLEAST64 " must be a multiple of 4.", function, i,
4637 pRegions[i].bufferOffset);
4638 }
4639
4640 // BufferRowLength must be 0, or greater than or equal to the width member of imageExtent
4641 if ((pRegions[i].bufferRowLength != 0) && (pRegions[i].bufferRowLength < pRegions[i].imageExtent.width)) {
4642 skip |=
4643 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4644 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferRowLength-00195",
4645 "%s(): pRegion[%d] bufferRowLength (%d) must be zero or greater-than-or-equal-to imageExtent.width (%d).",
4646 function, i, pRegions[i].bufferRowLength, pRegions[i].imageExtent.width);
4647 }
4648
4649 // BufferImageHeight must be 0, or greater than or equal to the height member of imageExtent
4650 if ((pRegions[i].bufferImageHeight != 0) && (pRegions[i].bufferImageHeight < pRegions[i].imageExtent.height)) {
4651 skip |= log_msg(
4652 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4653 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferImageHeight-00196",
4654                 "%s(): pRegion[%d] bufferImageHeight (%d) must be zero or greater than or equal to imageExtent.height (%d).",
4655 function, i, pRegions[i].bufferImageHeight, pRegions[i].imageExtent.height);
4656 }
4657
4658 // subresource aspectMask must have exactly 1 bit set
4659 const int num_bits = sizeof(VkFlags) * CHAR_BIT;
4660 std::bitset<num_bits> aspect_mask_bits(pRegions[i].imageSubresource.aspectMask);
4661 if (aspect_mask_bits.count() != 1) {
4662 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4663 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-aspectMask-00212",
4664                             "%s(): pRegion[%d] imageSubresource.aspectMask must have exactly 1 bit set.", function, i);
4665 }
4666
4667 // image subresource aspect bit must match format
4668 if (!VerifyAspectsPresent(pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format)) {
4669 skip |= log_msg(
4670 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4671 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-aspectMask-00211",
4672 "%s(): pRegion[%d] subresource aspectMask 0x%x specifies aspects that are not present in image format 0x%x.",
4673 function, i, pRegions[i].imageSubresource.aspectMask, image_state->createInfo.format);
4674 }
4675
4676 // Checks that apply only to compressed images
4677 if (FormatIsCompressed(image_state->createInfo.format) || FormatIsSinglePlane_422(image_state->createInfo.format)) {
4678 auto block_size = FormatTexelBlockExtent(image_state->createInfo.format);
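            // FormatTexelBlockExtent() gives the block dimensions in texels; for example, the BC1-BC7 and ETC2 formats
            // use 4x4x1 blocks, while single-plane 422 formats such as VK_FORMAT_G8B8G8R8_422_UNORM use 2x1x1 blocks.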
4679
4680 // BufferRowLength must be a multiple of block width
4681 if (SafeModulo(pRegions[i].bufferRowLength, block_size.width) != 0) {
4682 skip |= log_msg(
4683 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4684 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferRowLength-00203",
4685                     "%s(): pRegion[%d] bufferRowLength (%d) must be a multiple of the compressed image's texel block width (%d).",
4686 function, i, pRegions[i].bufferRowLength, block_size.width);
4687 }
4688
4689             // BufferImageHeight must be a multiple of block height
4690 if (SafeModulo(pRegions[i].bufferImageHeight, block_size.height) != 0) {
4691 skip |= log_msg(
4692 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4693 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferImageHeight-00204",
4694                     "%s(): pRegion[%d] bufferImageHeight (%d) must be a multiple of the compressed image's texel block height (%d).",
4695 function, i, pRegions[i].bufferImageHeight, block_size.height);
4696 }
4697
4698 // image offsets must be multiples of block dimensions
4699 if ((SafeModulo(pRegions[i].imageOffset.x, block_size.width) != 0) ||
4700 (SafeModulo(pRegions[i].imageOffset.y, block_size.height) != 0) ||
4701 (SafeModulo(pRegions[i].imageOffset.z, block_size.depth) != 0)) {
4702 skip |=
4703 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4704 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageOffset-00205",
4705 "%s(): pRegion[%d] imageOffset(x,y) (%d, %d) must be multiples of the compressed image's texel "
4706                             "block width & height (%d, %d).",
4707 function, i, pRegions[i].imageOffset.x, pRegions[i].imageOffset.y, block_size.width, block_size.height);
4708 }
4709
4710 // bufferOffset must be a multiple of block size (linear bytes)
4711 uint32_t block_size_in_bytes = FormatElementSize(image_state->createInfo.format);
4712 if (SafeModulo(pRegions[i].bufferOffset, block_size_in_bytes) != 0) {
4713 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4714 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-bufferOffset-00206",
4715 "%s(): pRegion[%d] bufferOffset (0x%" PRIxLEAST64
4716                                 ") must be a multiple of the compressed image's texel block size (%" PRIu32 ").",
4717 function, i, pRegions[i].bufferOffset, block_size_in_bytes);
4718 }
4719
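            // The extent checks below (00207-00209) allow a non-block-multiple extent only when the copy ends at the
            // edge of the subresource. Illustrative example: for a BC1 mip level 10 texels wide, imageOffset.x = 8 with
            // imageExtent.width = 2 is valid because 8 + 2 equals the subresource width, even though 2 % 4 != 0.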
4720 // imageExtent width must be a multiple of block width, or extent+offset width must equal subresource width
4721 VkExtent3D mip_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource));
4722 if ((SafeModulo(pRegions[i].imageExtent.width, block_size.width) != 0) &&
4723 (pRegions[i].imageExtent.width + pRegions[i].imageOffset.x != mip_extent.width)) {
4724 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4725 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageExtent-00207",
4726 "%s(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block width "
4727                                 "(%d), or when added to offset.x (%d) must equal the image subresource width (%d).",
4728 function, i, pRegions[i].imageExtent.width, block_size.width, pRegions[i].imageOffset.x,
4729 mip_extent.width);
4730 }
4731
4732 // imageExtent height must be a multiple of block height, or extent+offset height must equal subresource height
4733 if ((SafeModulo(pRegions[i].imageExtent.height, block_size.height) != 0) &&
4734 (pRegions[i].imageExtent.height + pRegions[i].imageOffset.y != mip_extent.height)) {
4735 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4736 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageExtent-00208",
4737 "%s(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block height "
4738                                 "(%d), or when added to offset.y (%d) must equal the image subresource height (%d).",
4739 function, i, pRegions[i].imageExtent.height, block_size.height, pRegions[i].imageOffset.y,
4740 mip_extent.height);
4741 }
4742
4743 // imageExtent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth
4744 if ((SafeModulo(pRegions[i].imageExtent.depth, block_size.depth) != 0) &&
4745 (pRegions[i].imageExtent.depth + pRegions[i].imageOffset.z != mip_extent.depth)) {
4746 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
4747 HandleToUint64(image_state->image), "VUID-VkBufferImageCopy-imageExtent-00209",
4748                                 "%s(): pRegion[%d] extent depth (%d) must be a multiple of the compressed texture block depth "
4749                                 "(%d), or when added to offset.z (%d) must equal the image subresource depth (%d).",
4750 function, i, pRegions[i].imageExtent.depth, block_size.depth, pRegions[i].imageOffset.z,
4751 mip_extent.depth);
4752 }
4753 }
4754 }
4755
4756 return skip;
4757 }
4758
ValidateImageBounds(const debug_report_data * report_data,const IMAGE_STATE * image_state,const uint32_t regionCount,const VkBufferImageCopy * pRegions,const char * func_name,const char * msg_code)4759 static bool ValidateImageBounds(const debug_report_data *report_data, const IMAGE_STATE *image_state, const uint32_t regionCount,
4760 const VkBufferImageCopy *pRegions, const char *func_name, const char *msg_code) {
4761 bool skip = false;
4762 const VkImageCreateInfo *image_info = &(image_state->createInfo);
4763
4764 for (uint32_t i = 0; i < regionCount; i++) {
4765 VkExtent3D extent = pRegions[i].imageExtent;
4766 VkOffset3D offset = pRegions[i].imageOffset;
4767
4768 if (IsExtentSizeZero(&extent)) // Warn on zero area subresource
4769 {
4770 skip |=
4771 log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0,
4772 kVUID_Core_Image_ZeroAreaSubregion, "%s: pRegion[%d] imageExtent of {%1d, %1d, %1d} has zero area",
4773 func_name, i, extent.width, extent.height, extent.depth);
4774 }
4775
4776 VkExtent3D image_extent = GetImageSubresourceExtent(image_state, &(pRegions[i].imageSubresource));
4777
4778 // If we're using a compressed format, valid extent is rounded up to multiple of block size (per 18.1)
4779 if (FormatIsCompressed(image_info->format)) {
4780 auto block_extent = FormatTexelBlockExtent(image_info->format);
4781 if (image_extent.width % block_extent.width) {
4782 image_extent.width += (block_extent.width - (image_extent.width % block_extent.width));
4783 }
4784 if (image_extent.height % block_extent.height) {
4785 image_extent.height += (block_extent.height - (image_extent.height % block_extent.height));
4786 }
4787 if (image_extent.depth % block_extent.depth) {
4788 image_extent.depth += (block_extent.depth - (image_extent.depth % block_extent.depth));
4789 }
4790 }
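        // Illustrative example: a 10x6 mip level of a BC1 image (4x4 blocks) is treated as 12x8 for the bounds check
        // below, since copies are allowed to address the partially-used texels of the last block in each dimension.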
4791
4792 if (0 != ExceedsBounds(&offset, &extent, &image_extent)) {
4793 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0,
4794                             msg_code, "%s: pRegion[%d] exceeds image bounds.", func_name, i);
4795 }
4796 }
4797
4798 return skip;
4799 }
4800
ValidateBufferBounds(const debug_report_data * report_data,IMAGE_STATE * image_state,BUFFER_STATE * buff_state,uint32_t regionCount,const VkBufferImageCopy * pRegions,const char * func_name,const char * msg_code)4801 static inline bool ValidateBufferBounds(const debug_report_data *report_data, IMAGE_STATE *image_state, BUFFER_STATE *buff_state,
4802 uint32_t regionCount, const VkBufferImageCopy *pRegions, const char *func_name,
4803 const char *msg_code) {
4804 bool skip = false;
4805
4806 VkDeviceSize buffer_size = buff_state->createInfo.size;
4807
4808 for (uint32_t i = 0; i < regionCount; i++) {
4809 VkExtent3D copy_extent = pRegions[i].imageExtent;
4810
4811 VkDeviceSize buffer_width = (0 == pRegions[i].bufferRowLength ? copy_extent.width : pRegions[i].bufferRowLength);
4812 VkDeviceSize buffer_height = (0 == pRegions[i].bufferImageHeight ? copy_extent.height : pRegions[i].bufferImageHeight);
4813 VkDeviceSize unit_size = FormatElementSize(image_state->createInfo.format); // size (bytes) of texel or block
4814
4815 // Handle special buffer packing rules for specific depth/stencil formats
4816 if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
4817 unit_size = FormatElementSize(VK_FORMAT_S8_UINT);
4818 } else if (pRegions[i].imageSubresource.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
4819 switch (image_state->createInfo.format) {
4820 case VK_FORMAT_D16_UNORM_S8_UINT:
4821 unit_size = FormatElementSize(VK_FORMAT_D16_UNORM);
4822 break;
4823 case VK_FORMAT_D32_SFLOAT_S8_UINT:
4824 unit_size = FormatElementSize(VK_FORMAT_D32_SFLOAT);
4825 break;
4826 case VK_FORMAT_X8_D24_UNORM_PACK32: // Fall through
4827 case VK_FORMAT_D24_UNORM_S8_UINT:
4828 unit_size = 4;
4829 break;
4830 default:
4831 break;
4832 }
4833 }
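        // Illustrative example: copying only the stencil aspect of VK_FORMAT_D24_UNORM_S8_UINT transfers tightly packed
        // 1-byte stencil values, while copying its depth aspect transfers 4-byte values, independent of the combined
        // format's element size.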
4834
4835 if (FormatIsCompressed(image_state->createInfo.format) || FormatIsSinglePlane_422(image_state->createInfo.format)) {
4836 // Switch to texel block units, rounding up for any partially-used blocks
4837 auto block_dim = FormatTexelBlockExtent(image_state->createInfo.format);
4838 buffer_width = (buffer_width + block_dim.width - 1) / block_dim.width;
4839 buffer_height = (buffer_height + block_dim.height - 1) / block_dim.height;
4840
4841 copy_extent.width = (copy_extent.width + block_dim.width - 1) / block_dim.width;
4842 copy_extent.height = (copy_extent.height + block_dim.height - 1) / block_dim.height;
4843 copy_extent.depth = (copy_extent.depth + block_dim.depth - 1) / block_dim.depth;
4844 }
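        // Illustrative example: a 10x6 texel copy of a BC1 image (4x4 blocks) becomes a 3x2 block copy here, and
        // buffer_width/buffer_height are likewise expressed in blocks, so unit_size (one block, 8 bytes for BC1) can be
        // applied uniformly below.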
4845
4846 // Either depth or layerCount may be greater than 1 (not both). This is the number of 'slices' to copy
4847 uint32_t z_copies = std::max(copy_extent.depth, pRegions[i].imageSubresource.layerCount);
4848         if (IsExtentSizeZero(&copy_extent) || (0 == z_copies)) {
4849 // TODO: Issue warning here? Already warned in ValidateImageBounds()...
4850 } else {
4851 // Calculate buffer offset of final copied byte, + 1.
4852 VkDeviceSize max_buffer_offset = (z_copies - 1) * buffer_height * buffer_width; // offset to slice
4853 max_buffer_offset += ((copy_extent.height - 1) * buffer_width) + copy_extent.width; // add row,col
4854 max_buffer_offset *= unit_size; // convert to bytes
4855 max_buffer_offset += pRegions[i].bufferOffset; // add initial offset (bytes)
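            // Illustrative example: a 16x16 VK_FORMAT_R8G8B8A8_UNORM copy with bufferRowLength = bufferImageHeight = 0
            // and layerCount = 1 gives max_buffer_offset = ((15 * 16) + 16) * 4 + bufferOffset = 1024 + bufferOffset,
            // so the buffer must be at least that large.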
4856
4857 if (buffer_size < max_buffer_offset) {
4858 skip |=
4859 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)0,
4860                         msg_code, "%s: pRegion[%d] exceeds buffer size of %" PRIu64 " bytes.", func_name, i, buffer_size);
4861 }
4862 }
4863 }
4864
4865 return skip;
4866 }
4867
PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferImageCopy * pRegions)4868 bool CoreChecks::PreCallValidateCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
4869 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
4870 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
4871 auto cb_node = GetCBNode(commandBuffer);
4872 auto src_image_state = GetImageState(srcImage);
4873 auto dst_buffer_state = GetBufferState(dstBuffer);
4874
4875 bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, src_image_state, "vkCmdCopyImageToBuffer");
4876
4877 // Validate command buffer state
4878 skip |= ValidateCmd(device_data, cb_node, CMD_COPYIMAGETOBUFFER, "vkCmdCopyImageToBuffer()");
4879
4880 // Command pool must support graphics, compute, or transfer operations
4881 auto pPool = GetCommandPoolNode(cb_node->createInfo.commandPool);
4882
4883 VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].queueFlags;
4884
4885 if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) {
4886 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
4887 HandleToUint64(cb_node->createInfo.commandPool), "VUID-vkCmdCopyImageToBuffer-commandBuffer-cmdpool",
4888 "Cannot call vkCmdCopyImageToBuffer() on a command buffer allocated from a pool without graphics, compute, "
4889                         "or transfer capabilities.");
4890 }
4891 skip |= ValidateImageBounds(report_data, src_image_state, regionCount, pRegions, "vkCmdCopyImageToBuffer()",
4892 "VUID-vkCmdCopyImageToBuffer-pRegions-00182");
4893 skip |= ValidateBufferBounds(report_data, src_image_state, dst_buffer_state, regionCount, pRegions, "vkCmdCopyImageToBuffer()",
4894 "VUID-vkCmdCopyImageToBuffer-pRegions-00183");
4895
4896 skip |= ValidateImageSampleCount(device_data, src_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyImageToBuffer(): srcImage",
4897 "VUID-vkCmdCopyImageToBuffer-srcImage-00188");
4898 skip |= ValidateMemoryIsBoundToImage(device_data, src_image_state, "vkCmdCopyImageToBuffer()",
4899 "VUID-vkCmdCopyImageToBuffer-srcImage-00187");
4900 skip |= ValidateMemoryIsBoundToBuffer(device_data, dst_buffer_state, "vkCmdCopyImageToBuffer()",
4901 "VUID-vkCmdCopyImageToBuffer-dstBuffer-00192");
4902
4903 // Validate that SRC image & DST buffer have correct usage flags set
4904 skip |= ValidateImageUsageFlags(device_data, src_image_state, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
4905 "VUID-vkCmdCopyImageToBuffer-srcImage-00186", "vkCmdCopyImageToBuffer()",
4906 "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
4907 skip |= ValidateBufferUsageFlags(device_data, dst_buffer_state, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true,
4908 "VUID-vkCmdCopyImageToBuffer-dstBuffer-00191", "vkCmdCopyImageToBuffer()",
4909 "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
4910 if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
4911 skip |= ValidateImageFormatFeatureFlags(device_data, src_image_state, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
4912 "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-srcImage-01998",
4913 "VUID-vkCmdCopyImageToBuffer-srcImage-01998");
4914 }
4915 skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyImageToBuffer()", "VUID-vkCmdCopyImageToBuffer-renderpass");
4916 bool hit_error = false;
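    // Shared presentable images are additionally allowed to remain in VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, so their
    // layout check uses a different VUID than the general case.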
4917 const char *src_invalid_layout_vuid =
4918 (src_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
4919 ? "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397"
4920 : "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190";
4921 for (uint32_t i = 0; i < regionCount; ++i) {
4922 skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].imageSubresource, "vkCmdCopyImageToBuffer()",
4923 "imageSubresource", i);
4924 skip |= VerifyImageLayout(device_data, cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout,
4925 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImageToBuffer()", src_invalid_layout_vuid,
4926 "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189", &hit_error);
4927 skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, src_image_state, &pRegions[i], i,
4928 "vkCmdCopyImageToBuffer()",
4929 "VUID-vkCmdCopyImageToBuffer-imageOffset-01794");
4930 skip |= ValidateImageMipLevel(device_data, cb_node, src_image_state, pRegions[i].imageSubresource.mipLevel, i,
4931 "vkCmdCopyImageToBuffer()", "imageSubresource",
4932 "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703");
4933 skip |= ValidateImageArrayLayerRange(device_data, cb_node, src_image_state, pRegions[i].imageSubresource.baseArrayLayer,
4934 pRegions[i].imageSubresource.layerCount, i, "vkCmdCopyImageToBuffer()",
4935 "imageSubresource", "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704");
4936 }
4937 return skip;
4938 }
4939
PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferImageCopy * pRegions)4940 void CoreChecks::PreCallRecordCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
4941 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
4942 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
4943 auto cb_node = GetCBNode(commandBuffer);
4944 auto src_image_state = GetImageState(srcImage);
4945 auto dst_buffer_state = GetBufferState(dstBuffer);
4946
4947 // Make sure that all image slices are updated to correct layout
4948 for (uint32_t i = 0; i < regionCount; ++i) {
4949 SetImageLayout(device_data, cb_node, src_image_state, pRegions[i].imageSubresource, srcImageLayout);
4950 }
4951 // Update bindings between buffer/image and cmd buffer
4952 AddCommandBufferBindingImage(device_data, cb_node, src_image_state);
4953 AddCommandBufferBindingBuffer(device_data, cb_node, dst_buffer_state);
4954 }
4955
PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer,VkBuffer srcBuffer,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkBufferImageCopy * pRegions)4956 bool CoreChecks::PreCallValidateCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
4957 VkImageLayout dstImageLayout, uint32_t regionCount,
4958 const VkBufferImageCopy *pRegions) {
4959 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
4960 auto cb_node = GetCBNode(commandBuffer);
4961 auto src_buffer_state = GetBufferState(srcBuffer);
4962 auto dst_image_state = GetImageState(dstImage);
4963
4964 bool skip = ValidateBufferImageCopyData(report_data, regionCount, pRegions, dst_image_state, "vkCmdCopyBufferToImage");
4965
4966 // Validate command buffer state
4967 skip |= ValidateCmd(device_data, cb_node, CMD_COPYBUFFERTOIMAGE, "vkCmdCopyBufferToImage()");
4968
4969 // Command pool must support graphics, compute, or transfer operations
4970 auto pPool = GetCommandPoolNode(cb_node->createInfo.commandPool);
4971 VkQueueFlags queue_flags = GetPhysicalDeviceState()->queue_family_properties[pPool->queueFamilyIndex].queueFlags;
4972 if (0 == (queue_flags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))) {
4973 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
4974 HandleToUint64(cb_node->createInfo.commandPool), "VUID-vkCmdCopyBufferToImage-commandBuffer-cmdpool",
4975 "Cannot call vkCmdCopyBufferToImage() on a command buffer allocated from a pool without graphics, compute, "
4976                         "or transfer capabilities.");
4977 }
4978 skip |= ValidateImageBounds(report_data, dst_image_state, regionCount, pRegions, "vkCmdCopyBufferToImage()",
4979 "VUID-vkCmdCopyBufferToImage-pRegions-00172");
4980 skip |= ValidateBufferBounds(report_data, dst_image_state, src_buffer_state, regionCount, pRegions, "vkCmdCopyBufferToImage()",
4981 "VUID-vkCmdCopyBufferToImage-pRegions-00171");
4982 skip |= ValidateImageSampleCount(device_data, dst_image_state, VK_SAMPLE_COUNT_1_BIT, "vkCmdCopyBufferToImage(): dstImage",
4983 "VUID-vkCmdCopyBufferToImage-dstImage-00179");
4984 skip |= ValidateMemoryIsBoundToBuffer(device_data, src_buffer_state, "vkCmdCopyBufferToImage()",
4985 "VUID-vkCmdCopyBufferToImage-srcBuffer-00176");
4986 skip |= ValidateMemoryIsBoundToImage(device_data, dst_image_state, "vkCmdCopyBufferToImage()",
4987 "VUID-vkCmdCopyBufferToImage-dstImage-00178");
4988 skip |= ValidateBufferUsageFlags(device_data, src_buffer_state, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true,
4989 "VUID-vkCmdCopyBufferToImage-srcBuffer-00174", "vkCmdCopyBufferToImage()",
4990 "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
4991 skip |= ValidateImageUsageFlags(device_data, dst_image_state, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
4992 "VUID-vkCmdCopyBufferToImage-dstImage-00177", "vkCmdCopyBufferToImage()",
4993 "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
4994 if (GetApiVersion() >= VK_API_VERSION_1_1 || GetDeviceExtensions()->vk_khr_maintenance1) {
4995 skip |= ValidateImageFormatFeatureFlags(device_data, dst_image_state, VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
4996 "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-dstImage-01997",
4997 "VUID-vkCmdCopyBufferToImage-dstImage-01997");
4998 }
4999 skip |= InsideRenderPass(device_data, cb_node, "vkCmdCopyBufferToImage()", "VUID-vkCmdCopyBufferToImage-renderpass");
5000 bool hit_error = false;
5001 const char *dst_invalid_layout_vuid =
5002 (dst_image_state->shared_presentable && GetDeviceExtensions()->vk_khr_shared_presentable_image)
5003 ? "VUID-vkCmdCopyBufferToImage-dstImageLayout-01396"
5004 : "VUID-vkCmdCopyBufferToImage-dstImageLayout-00181";
5005 for (uint32_t i = 0; i < regionCount; ++i) {
5006 skip |= ValidateImageSubresourceLayers(device_data, cb_node, &pRegions[i].imageSubresource, "vkCmdCopyBufferToImage()",
5007 "imageSubresource", i);
5008 skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout,
5009 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyBufferToImage()", dst_invalid_layout_vuid,
5010 "VUID-vkCmdCopyBufferToImage-dstImageLayout-00180", &hit_error);
5011 skip |= ValidateCopyBufferImageTransferGranularityRequirements(device_data, cb_node, dst_image_state, &pRegions[i], i,
5012 "vkCmdCopyBufferToImage()",
5013 "VUID-vkCmdCopyBufferToImage-imageOffset-01793");
5014 skip |= ValidateImageMipLevel(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource.mipLevel, i,
5015 "vkCmdCopyBufferToImage()", "imageSubresource",
5016 "VUID-vkCmdCopyBufferToImage-imageSubresource-01701");
5017 skip |= ValidateImageArrayLayerRange(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource.baseArrayLayer,
5018 pRegions[i].imageSubresource.layerCount, i, "vkCmdCopyBufferToImage()",
5019 "imageSubresource", "VUID-vkCmdCopyBufferToImage-imageSubresource-01702");
5020 }
5021 return skip;
5022 }
5023
PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer,VkBuffer srcBuffer,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkBufferImageCopy * pRegions)5024 void CoreChecks::PreCallRecordCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
5025 VkImageLayout dstImageLayout, uint32_t regionCount,
5026 const VkBufferImageCopy *pRegions) {
5027 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
5028 auto cb_node = GetCBNode(commandBuffer);
5029 auto src_buffer_state = GetBufferState(srcBuffer);
5030 auto dst_image_state = GetImageState(dstImage);
5031
5032 // Make sure that all image slices are updated to correct layout
5033 for (uint32_t i = 0; i < regionCount; ++i) {
5034 SetImageLayout(device_data, cb_node, dst_image_state, pRegions[i].imageSubresource, dstImageLayout);
5035 }
5036 AddCommandBufferBindingBuffer(device_data, cb_node, src_buffer_state);
5037 AddCommandBufferBindingImage(device_data, cb_node, dst_image_state);
5038 }
5039
PreCallValidateGetImageSubresourceLayout(VkDevice device,VkImage image,const VkImageSubresource * pSubresource,VkSubresourceLayout * pLayout)5040 bool CoreChecks::PreCallValidateGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
5041 VkSubresourceLayout *pLayout) {
5042 layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
5043 const auto report_data = device_data->report_data;
5044 bool skip = false;
5045 const VkImageAspectFlags sub_aspect = pSubresource->aspectMask;
5046
5047 // The aspectMask member of pSubresource must only have a single bit set
5048 const int num_bits = sizeof(sub_aspect) * CHAR_BIT;
5049 std::bitset<num_bits> aspect_mask_bits(sub_aspect);
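    // Illustrative example: an aspectMask of (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT) has two bits set
    // and is rejected here; the caller must query the depth and stencil aspects separately.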
5050 if (aspect_mask_bits.count() != 1) {
5051 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
5052 "VUID-vkGetImageSubresourceLayout-aspectMask-00997",
5053 "vkGetImageSubresourceLayout(): VkImageSubresource.aspectMask must have exactly 1 bit set.");
5054 }
5055
5056 IMAGE_STATE *image_entry = GetImageState(image);
5057 if (!image_entry) {
5058 return skip;
5059 }
5060
5061 // image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR
5062 if (image_entry->createInfo.tiling != VK_IMAGE_TILING_LINEAR) {
5063 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
5064 "VUID-vkGetImageSubresourceLayout-image-00996",
5065 "vkGetImageSubresourceLayout(): Image must have tiling of VK_IMAGE_TILING_LINEAR.");
5066 }
5067
5068 // mipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created
5069 if (pSubresource->mipLevel >= image_entry->createInfo.mipLevels) {
5070 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
5071 "VUID-vkGetImageSubresourceLayout-mipLevel-01716",
5072                         "vkGetImageSubresourceLayout(): pSubresource.mipLevel (%d) must be less than the image's mipLevels (%d).", pSubresource->mipLevel,
5073 image_entry->createInfo.mipLevels);
5074 }
5075
5076 // arrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when the image was created
5077 if (pSubresource->arrayLayer >= image_entry->createInfo.arrayLayers) {
5078 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
5079 "VUID-vkGetImageSubresourceLayout-arrayLayer-01717",
5080                         "vkGetImageSubresourceLayout(): pSubresource.arrayLayer (%d) must be less than the image's arrayLayers (%d).",
5081 pSubresource->arrayLayer, image_entry->createInfo.arrayLayers);
5082 }
5083
5084 // subresource's aspect must be compatible with image's format.
5085 const VkFormat img_format = image_entry->createInfo.format;
5086 if (FormatIsMultiplane(img_format)) {
5087 VkImageAspectFlags allowed_flags = (VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR);
5088 const char *vuid = "VUID-vkGetImageSubresourceLayout-format-01581"; // 2-plane version
5089 if (FormatPlaneCount(img_format) > 2u) {
5090 allowed_flags |= VK_IMAGE_ASPECT_PLANE_2_BIT_KHR;
5091 vuid = "VUID-vkGetImageSubresourceLayout-format-01582"; // 3-plane version
5092 }
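        // Illustrative example: VK_FORMAT_G8_B8R8_2PLANE_420_UNORM has two planes, so only PLANE_0 and PLANE_1 are
        // accepted, whereas three-plane formats such as VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM also accept PLANE_2.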
5093 if (sub_aspect != (sub_aspect & allowed_flags)) {
5094 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
5095 HandleToUint64(image), vuid,
5096 "vkGetImageSubresourceLayout(): For multi-planar images, VkImageSubresource.aspectMask (0x%" PRIx32
5097 ") must be a single-plane specifier flag.",
5098 sub_aspect);
5099 }
5100 } else if (FormatIsColor(img_format)) {
5101 if (sub_aspect != VK_IMAGE_ASPECT_COLOR_BIT) {
5102 skip |= log_msg(
5103 report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, HandleToUint64(image),
5104 "VUID-VkImageSubresource-aspectMask-parameter",
5105                 "vkGetImageSubresourceLayout(): For color formats, VkImageSubresource.aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT.");
5106 }
5107 } else if (FormatIsDepthOrStencil(img_format)) {
5108 if ((sub_aspect != VK_IMAGE_ASPECT_DEPTH_BIT) && (sub_aspect != VK_IMAGE_ASPECT_STENCIL_BIT)) {
5109 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
5110 HandleToUint64(image), "VUID-VkImageSubresource-aspectMask-parameter",
5111 "vkGetImageSubresourceLayout(): For depth/stencil formats, VkImageSubresource.aspectMask must be "
5112 "either VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT.");
5113 }
5114 }
5115
5116 if (GetDeviceExtensions()->vk_android_external_memory_android_hardware_buffer) {
5117 skip |= ValidateGetImageSubresourceLayoutANDROID(device_data, image);
5118 }
5119
5120 return skip;
5121 }
5122