1 /* Copyright (c) 2015-2016 The Khronos Group Inc.
2 * Copyright (c) 2015-2016 Valve Corporation
3 * Copyright (c) 2015-2016 LunarG, Inc.
4 * Copyright (C) 2015-2016 Google Inc.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Tobin Ehlis <tobine@google.com>
19 */
20
21 #include "descriptor_sets.h"
22 #include "vk_enum_string_helper.h"
23 #include "vk_safe_struct.h"
24 #include <sstream>
25
26 // Construct DescriptorSetLayout instance from given create info
27 cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
28 const VkDescriptorSetLayout layout)
29 : layout_(layout), binding_count_(p_create_info->bindingCount), descriptor_count_(0), dynamic_descriptor_count_(0) {
30 uint32_t global_index = 0;
31 for (uint32_t i = 0; i < binding_count_; ++i) {
32 descriptor_count_ += p_create_info->pBindings[i].descriptorCount;
33 binding_to_index_map_[p_create_info->pBindings[i].binding] = i;
34 binding_to_global_start_index_map_[p_create_info->pBindings[i].binding] = global_index;
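// The end index recorded below is inclusive: a binding with N descriptors occupies global indices
// [global_index, global_index + N - 1]; a zero-count binding collapses to a single placeholder index
// and does not advance the running global_index.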
35 global_index += p_create_info->pBindings[i].descriptorCount ? p_create_info->pBindings[i].descriptorCount - 1 : 0;
36 binding_to_global_end_index_map_[p_create_info->pBindings[i].binding] = global_index;
37 global_index += p_create_info->pBindings[i].descriptorCount ? 1 : 0;
38 bindings_.push_back(safe_VkDescriptorSetLayoutBinding(&p_create_info->pBindings[i]));
39 // In cases where we should ignore pImmutableSamplers make sure it's NULL
40 if ((p_create_info->pBindings[i].pImmutableSamplers) &&
41 ((p_create_info->pBindings[i].descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER) &&
42 (p_create_info->pBindings[i].descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))) {
43 bindings_.back().pImmutableSamplers = nullptr;
44 }
45 if (p_create_info->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
46 p_create_info->pBindings[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
47 dynamic_descriptor_count_ += p_create_info->pBindings[i].descriptorCount;
48 }
49 }
50 }
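// Illustrative sketch of the maps built above, not taken from the layer itself: assume a hypothetical
// create info with two bindings (binding numbers need not be consecutive).
//
//   pBindings[0]: binding = 0, descriptorCount = 3  ->  binding_to_index_map_[0] = 0
//                                                       global start/end for binding 0 = [0, 2] (inclusive)
//   pBindings[1]: binding = 2, descriptorCount = 1  ->  binding_to_index_map_[2] = 1
//                                                       global start/end for binding 2 = [3, 3]
//
// descriptor_count_ ends up as 4, and that global index space [0..3] is what DescriptorSet::descriptors_
// below is sized against.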
51
52 // Validate descriptor set layout create info
53 bool cvdescriptorset::DescriptorSetLayout::ValidateCreateInfo(debug_report_data *report_data,
54 const VkDescriptorSetLayoutCreateInfo *create_info) {
55 bool skip = false;
56 std::unordered_set<uint32_t> bindings;
57 for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
58 if (!bindings.insert(create_info->pBindings[i].binding).second) {
59 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
60 VALIDATION_ERROR_02345, "DS", "duplicated binding number in VkDescriptorSetLayoutBinding. %s",
61 validation_error_map[VALIDATION_ERROR_02345]);
62 }
63 }
64 return skip;
65 }
66
67 // put all bindings into the given set
68 void cvdescriptorset::DescriptorSetLayout::FillBindingSet(std::unordered_set<uint32_t> *binding_set) const {
69 for (auto binding_index_pair : binding_to_index_map_)
70 binding_set->insert(binding_index_pair.first);
71 }
72
73 VkDescriptorSetLayoutBinding const *
74 cvdescriptorset::DescriptorSetLayout::GetDescriptorSetLayoutBindingPtrFromBinding(const uint32_t binding) const {
75 const auto &bi_itr = binding_to_index_map_.find(binding);
76 if (bi_itr != binding_to_index_map_.end()) {
77 return bindings_[bi_itr->second].ptr();
78 }
79 return nullptr;
80 }
81 VkDescriptorSetLayoutBinding const *
82 cvdescriptorset::DescriptorSetLayout::GetDescriptorSetLayoutBindingPtrFromIndex(const uint32_t index) const {
83 if (index >= bindings_.size())
84 return nullptr;
85 return bindings_[index].ptr();
86 }
87 // Return descriptorCount for given binding, 0 if binding is unavailable
88 uint32_t cvdescriptorset::DescriptorSetLayout::GetDescriptorCountFromBinding(const uint32_t binding) const {
89 const auto &bi_itr = binding_to_index_map_.find(binding);
90 if (bi_itr != binding_to_index_map_.end()) {
91 return bindings_[bi_itr->second].descriptorCount;
92 }
93 return 0;
94 }
95 // Return descriptorCount for given index, 0 if index is unavailable
96 uint32_t cvdescriptorset::DescriptorSetLayout::GetDescriptorCountFromIndex(const uint32_t index) const {
97 if (index >= bindings_.size())
98 return 0;
99 return bindings_[index].descriptorCount;
100 }
101 // For the given binding, return descriptorType
102 VkDescriptorType cvdescriptorset::DescriptorSetLayout::GetTypeFromBinding(const uint32_t binding) const {
103 assert(binding_to_index_map_.count(binding));
104 const auto &bi_itr = binding_to_index_map_.find(binding);
105 if (bi_itr != binding_to_index_map_.end()) {
106 return bindings_[bi_itr->second].descriptorType;
107 }
108 return VK_DESCRIPTOR_TYPE_MAX_ENUM;
109 }
110 // For the given index, return descriptorType
111 VkDescriptorType cvdescriptorset::DescriptorSetLayout::GetTypeFromIndex(const uint32_t index) const {
112 assert(index < bindings_.size());
113 return bindings_[index].descriptorType;
114 }
115 // For the given global index, return descriptorType
116 // Currently just counting up through bindings_, may improve this in future
117 VkDescriptorType cvdescriptorset::DescriptorSetLayout::GetTypeFromGlobalIndex(const uint32_t index) const {
118 uint32_t global_offset = 0;
119 for (const auto &binding : bindings_) {
120 global_offset += binding.descriptorCount;
121 if (index < global_offset)
122 return binding.descriptorType;
123 }
124 assert(0); // requested global index is out of bounds
125 return VK_DESCRIPTOR_TYPE_MAX_ENUM;
126 }
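// Worked example, using the same hypothetical two-binding layout sketched after the constructor: if
// binding 0 holds 3 VK_DESCRIPTOR_TYPE_SAMPLER descriptors and binding 2 holds 1
// VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER descriptor, global indices 0-2 return SAMPLER, index 3 returns
// UNIFORM_BUFFER, and index 4 trips the assert above.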
127 // For the given binding, return stageFlags
128 VkShaderStageFlags cvdescriptorset::DescriptorSetLayout::GetStageFlagsFromBinding(const uint32_t binding) const {
129 assert(binding_to_index_map_.count(binding));
130 const auto &bi_itr = binding_to_index_map_.find(binding);
131 if (bi_itr != binding_to_index_map_.end()) {
132 return bindings_[bi_itr->second].stageFlags;
133 }
134 return VkShaderStageFlags(0);
135 }
136 // For the given binding, return start index
137 uint32_t cvdescriptorset::DescriptorSetLayout::GetGlobalStartIndexFromBinding(const uint32_t binding) const {
138 assert(binding_to_global_start_index_map_.count(binding));
139 const auto &btgsi_itr = binding_to_global_start_index_map_.find(binding);
140 if (btgsi_itr != binding_to_global_start_index_map_.end()) {
141 return btgsi_itr->second;
142 }
143 // In the error case, return max uint32_t so the index is out of bounds and fails fast
144 assert(0);
145 return 0xFFFFFFFF;
146 }
147 // For the given binding, return end index
148 uint32_t cvdescriptorset::DescriptorSetLayout::GetGlobalEndIndexFromBinding(const uint32_t binding) const {
149 assert(binding_to_global_end_index_map_.count(binding));
150 const auto &btgei_itr = binding_to_global_end_index_map_.find(binding);
151 if (btgei_itr != binding_to_global_end_index_map_.end()) {
152 return btgei_itr->second;
153 }
154 // In the error case, return max uint32_t so the index is out of bounds and fails fast
155 assert(0);
156 return 0xFFFFFFFF;
157 }
158 // For given binding, return ptr to ImmutableSampler array
159 VkSampler const *cvdescriptorset::DescriptorSetLayout::GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
160 assert(binding_to_index_map_.count(binding));
161 const auto &bi_itr = binding_to_index_map_.find(binding);
162 if (bi_itr != binding_to_index_map_.end()) {
163 return bindings_[bi_itr->second].pImmutableSamplers;
164 }
165 return nullptr;
166 }
167 // For given index, return ptr to ImmutableSampler array
168 VkSampler const *cvdescriptorset::DescriptorSetLayout::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
169 assert(index < bindings_.size());
170 return bindings_[index].pImmutableSamplers;
171 }
172 // If our layout is compatible with rh_ds_layout, return true,
173 // else return false and fill in error_msg with a description of what causes the incompatibility
174 bool cvdescriptorset::DescriptorSetLayout::IsCompatible(const DescriptorSetLayout *rh_ds_layout, std::string *error_msg) const {
175 // Trivial case
176 if (layout_ == rh_ds_layout->GetDescriptorSetLayout())
177 return true;
178 if (descriptor_count_ != rh_ds_layout->descriptor_count_) {
179 std::stringstream error_str;
180 error_str << "DescriptorSetLayout " << layout_ << " has " << descriptor_count_ << " descriptors, but DescriptorSetLayout "
181 << rh_ds_layout->GetDescriptorSetLayout() << " has " << rh_ds_layout->descriptor_count_ << " descriptors.";
182 *error_msg = error_str.str();
183 return false; // trivial fail case
184 }
185 // Descriptor counts match so need to go through bindings one-by-one
186 // and verify that type and stageFlags match
187 for (const auto &binding : bindings_) {
188 // TODO : Do we also need to check immutable samplers?
189 // VkDescriptorSetLayoutBinding *rh_binding;
190 if (binding.descriptorCount != rh_ds_layout->GetDescriptorCountFromBinding(binding.binding)) {
191 std::stringstream error_str;
192 error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << layout_ << " has a descriptorCount of "
193 << binding.descriptorCount << " but binding " << binding.binding << " for DescriptorSetLayout "
194 << rh_ds_layout->GetDescriptorSetLayout() << " has a descriptorCount of "
195 << rh_ds_layout->GetDescriptorCountFromBinding(binding.binding);
196 *error_msg = error_str.str();
197 return false;
198 } else if (binding.descriptorType != rh_ds_layout->GetTypeFromBinding(binding.binding)) {
199 std::stringstream error_str;
200 error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << layout_ << " is type '"
201 << string_VkDescriptorType(binding.descriptorType) << "' but binding " << binding.binding
202 << " for DescriptorSetLayout " << rh_ds_layout->GetDescriptorSetLayout() << " is type '"
203 << string_VkDescriptorType(rh_ds_layout->GetTypeFromBinding(binding.binding)) << "'";
204 *error_msg = error_str.str();
205 return false;
206 } else if (binding.stageFlags != rh_ds_layout->GetStageFlagsFromBinding(binding.binding)) {
207 std::stringstream error_str;
208 error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << layout_ << " has stageFlags "
209 << binding.stageFlags << " but binding " << binding.binding << " for DescriptorSetLayout "
210 << rh_ds_layout->GetDescriptorSetLayout() << " has stageFlags "
211 << rh_ds_layout->GetStageFlagsFromBinding(binding.binding);
212 *error_msg = error_str.str();
213 return false;
214 }
215 }
216 return true;
217 }
218
219 bool cvdescriptorset::DescriptorSetLayout::IsNextBindingConsistent(const uint32_t binding) const {
220 if (!binding_to_index_map_.count(binding + 1))
221 return false;
222 auto const &bi_itr = binding_to_index_map_.find(binding);
223 if (bi_itr != binding_to_index_map_.end()) {
224 const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
225 if (next_bi_itr != binding_to_index_map_.end()) {
226 auto type = bindings_[bi_itr->second].descriptorType;
227 auto stage_flags = bindings_[bi_itr->second].stageFlags;
228 auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
229 if ((type != bindings_[next_bi_itr->second].descriptorType) ||
230 (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
231 (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false))) {
232 return false;
233 }
234 return true;
235 }
236 }
237 return false;
238 }
239 // Starting at offset descriptor of given binding, parse over update_count
240 // descriptor updates and verify that for any binding boundaries that are crossed, the next binding(s) are all consistent
241 // Consistency means that their type, stage flags, and whether or not they use immutable samplers matches
242 // If so, return true. If not, fill in error_msg and return false
243 bool cvdescriptorset::DescriptorSetLayout::VerifyUpdateConsistency(uint32_t current_binding, uint32_t offset, uint32_t update_count,
244 const char *type, const VkDescriptorSet set,
245 std::string *error_msg) const {
246 // Verify consecutive bindings match (if needed)
247 auto orig_binding = current_binding;
248 // Track count of descriptors in the current_bindings that are remaining to be updated
249 auto binding_remaining = GetDescriptorCountFromBinding(current_binding);
250 // First, it's legal to offset beyond your own binding so handle that case
251 // Really this is just searching for the binding in which the update begins and adjusting offset accordingly
252 while (offset >= binding_remaining) {
253 // Advance to next binding, decrement offset by binding size
254 offset -= binding_remaining;
255 binding_remaining = GetDescriptorCountFromBinding(++current_binding);
256 }
257 binding_remaining -= offset;
258 while (update_count > binding_remaining) { // While our updates overstep current binding
259 // Verify next consecutive binding matches type, stage flags & immutable sampler use
260 if (!IsNextBindingConsistent(current_binding++)) {
261 std::stringstream error_str;
262 error_str << "Attempting " << type << " descriptor set " << set << " binding #" << orig_binding << " with #"
263 << update_count << " descriptors being updated but this update oversteps the bounds of this binding and the "
264 "next binding is not consistent with current binding so this update is invalid.";
265 *error_msg = error_str.str();
266 return false;
267 }
268 // For sake of this check consider the bindings updated and grab count for next binding
269 update_count -= binding_remaining;
270 binding_remaining = GetDescriptorCountFromBinding(current_binding);
271 }
272 return true;
273 }
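// Illustrative walk-through of the loop above (hypothetical layout, not from the spec): if binding 0
// and binding 1 each have descriptorCount = 2 with matching type/stageFlags/immutable-sampler use, an
// update starting at binding 0, offset 1, with update_count = 3 consumes the last descriptor of
// binding 0 plus both descriptors of binding 1; the single IsNextBindingConsistent(0) check passes and
// the update is accepted. Were binding 1 declared with a different descriptorType, the same update
// would return false with the "not consistent" message built above.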
274
275 cvdescriptorset::AllocateDescriptorSetsData::AllocateDescriptorSetsData(uint32_t count)
276 : required_descriptors_by_type{}, layout_nodes(count, nullptr) {}
277
278 cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, const VkDescriptorPool pool,
279 const DescriptorSetLayout *layout, const core_validation::layer_data *dev_data)
280 : some_update_(false), set_(set), pool_state_(nullptr), p_layout_(layout), device_data_(dev_data) {
281 pool_state_ = getDescriptorPoolState(dev_data, pool);
282 // For each binding, create default descriptors of the given type
283 for (uint32_t i = 0; i < p_layout_->GetBindingCount(); ++i) {
284 auto type = p_layout_->GetTypeFromIndex(i);
285 switch (type) {
286 case VK_DESCRIPTOR_TYPE_SAMPLER: {
287 auto immut_sampler = p_layout_->GetImmutableSamplerPtrFromIndex(i);
288 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
289 if (immut_sampler)
290 descriptors_.emplace_back(new SamplerDescriptor(immut_sampler + di));
291 else
292 descriptors_.emplace_back(new SamplerDescriptor());
293 }
294 break;
295 }
296 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
297 auto immut = p_layout_->GetImmutableSamplerPtrFromIndex(i);
298 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
299 if (immut)
300 descriptors_.emplace_back(new ImageSamplerDescriptor(immut + di));
301 else
302 descriptors_.emplace_back(new ImageSamplerDescriptor());
303 }
304 break;
305 }
306 // ImageDescriptors
307 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
308 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
309 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
310 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
311 descriptors_.emplace_back(new ImageDescriptor(type));
312 break;
313 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
314 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
315 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
316 descriptors_.emplace_back(new TexelDescriptor(type));
317 break;
318 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
319 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
320 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
321 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
322 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
323 descriptors_.emplace_back(new BufferDescriptor(type));
324 break;
325 default:
326 assert(0); // Bad descriptor type specified
327 break;
328 }
329 }
330 }
331
332 cvdescriptorset::DescriptorSet::~DescriptorSet() {
333 InvalidateBoundCmdBuffers();
334 }
335
336
337 static std::string string_descriptor_req_view_type(descriptor_req req) {
338 std::string result("");
339 for (unsigned i = 0; i <= VK_IMAGE_VIEW_TYPE_END_RANGE; i++) {
340 if (req & (1 << i)) {
341 if (result.size()) result += ", ";
342 result += string_VkImageViewType(VkImageViewType(i));
343 }
344 }
345
346 if (!result.size())
347 result = "(none)";
348
349 return result;
350 }
351
352
353 // Is this set's underlying layout compatible with the passed-in layout according to "Pipeline Layout Compatibility" in the spec?
354 bool cvdescriptorset::DescriptorSet::IsCompatible(const DescriptorSetLayout *layout, std::string *error) const {
355 return layout->IsCompatible(p_layout_, error);
356 }
357
358 // Validate that the state of this set is appropriate for the given bindings and dynamic_offsets at Draw time
359 // This includes validating that all descriptors in the given bindings are updated,
360 // that any update buffers are valid, and that any dynamic offsets are within the bounds of their buffers.
361 // Return true if state is acceptable, or false and write an error message into error string
362 bool cvdescriptorset::DescriptorSet::ValidateDrawState(const std::map<uint32_t, descriptor_req> &bindings,
363 const std::vector<uint32_t> &dynamic_offsets, std::string *error) const {
364 auto dyn_offset_index = 0;
365 for (auto binding_pair : bindings) {
366 auto binding = binding_pair.first;
367 if (!p_layout_->HasBinding(binding)) {
368 std::stringstream error_str;
369 error_str << "Attempting to validate DrawState for binding #" << binding
370 << " which is an invalid binding for this descriptor set.";
371 *error = error_str.str();
372 return false;
373 }
374 auto start_idx = p_layout_->GetGlobalStartIndexFromBinding(binding);
375 if (descriptors_[start_idx]->IsImmutableSampler()) {
376 // Nothing to do for strictly immutable sampler
377 } else {
378 auto end_idx = p_layout_->GetGlobalEndIndexFromBinding(binding);
379 for (uint32_t i = start_idx; i <= end_idx; ++i) {
380 if (!descriptors_[i]->updated) {
381 std::stringstream error_str;
382 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
383 << " is being used in draw but has not been updated.";
384 *error = error_str.str();
385 return false;
386 } else {
387 auto descriptor_class = descriptors_[i]->GetClass();
388 if (descriptor_class == GeneralBuffer) {
389 // Verify that buffers are valid
390 auto buffer = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetBuffer();
391 auto buffer_node = getBufferNode(device_data_, buffer);
392 if (!buffer_node) {
393 std::stringstream error_str;
394 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
395 << " references invalid buffer " << buffer << ".";
396 *error = error_str.str();
397 return false;
398 } else {
399 auto mem_entry = getMemObjInfo(device_data_, buffer_node->binding.mem);
400 if (!mem_entry) {
401 std::stringstream error_str;
402 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
403 << " uses buffer " << buffer << " that references invalid memory "
404 << buffer_node->binding.mem << ".";
405 *error = error_str.str();
406 return false;
407 }
408 }
409 if (descriptors_[i]->IsDynamic()) {
410 // Validate that dynamic offsets are within the buffer
411 auto buffer_size = buffer_node->createInfo.size;
412 auto range = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetRange();
413 auto desc_offset = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetOffset();
414 auto dyn_offset = dynamic_offsets[dyn_offset_index++];
415 if (VK_WHOLE_SIZE == range) {
416 if ((dyn_offset + desc_offset) > buffer_size) {
417 std::stringstream error_str;
418 error_str << "Dynamic descriptor in binding #" << binding << " at global descriptor index " << i
419 << " uses buffer " << buffer
420 << " with update range of VK_WHOLE_SIZE has dynamic offset " << dyn_offset
421 << " combined with offset " << desc_offset << " that oversteps the buffer size of "
422 << buffer_size << ".";
423 *error = error_str.str();
424 return false;
425 }
426 } else {
427 if ((dyn_offset + desc_offset + range) > buffer_size) {
428 std::stringstream error_str;
429 error_str << "Dynamic descriptor in binding #" << binding << " at global descriptor index " << i
430 << " uses buffer " << buffer << " with dynamic offset " << dyn_offset
431 << " combined with offset " << desc_offset << " and range " << range
432 << " that oversteps the buffer size of " << buffer_size << ".";
433 *error = error_str.str();
434 return false;
435 }
436 }
437 }
438 }
439 else if (descriptor_class == ImageSampler || descriptor_class == Image) {
440 auto image_view = (descriptor_class == ImageSampler)
441 ? static_cast<ImageSamplerDescriptor *>(descriptors_[i].get())->GetImageView()
442 : static_cast<ImageDescriptor *>(descriptors_[i].get())->GetImageView();
443 auto reqs = binding_pair.second;
444
445 auto image_view_state = getImageViewState(device_data_, image_view);
446 assert(image_view_state);
447 auto image_view_ci = image_view_state->create_info;
448
449 if ((reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) && (~reqs & (1 << image_view_ci.viewType))) {
450 // bad view type
451 std::stringstream error_str;
452 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
453 << " requires an image view of type " << string_descriptor_req_view_type(reqs)
454 << " but got " << string_VkImageViewType(image_view_ci.viewType) << ".";
455 *error = error_str.str();
456 return false;
457 }
458
459 auto image_node = getImageState(device_data_, image_view_ci.image);
460 assert(image_node);
461
462 if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) &&
463 image_node->createInfo.samples != VK_SAMPLE_COUNT_1_BIT) {
464 std::stringstream error_str;
465 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
466 << " requires bound image to have VK_SAMPLE_COUNT_1_BIT but got "
467 << string_VkSampleCountFlagBits(image_node->createInfo.samples) << ".";
468 *error = error_str.str();
469 return false;
470 }
471
472 if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) &&
473 image_node->createInfo.samples == VK_SAMPLE_COUNT_1_BIT) {
474 std::stringstream error_str;
475 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
476 << " requires bound image to have multiple samples, but got VK_SAMPLE_COUNT_1_BIT.";
477 *error = error_str.str();
478 return false;
479 }
480 }
481 }
482 }
483 }
484 }
485 return true;
486 }
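// Worked arithmetic for the dynamic-offset checks above (hypothetical values): a dynamic buffer
// descriptor with offset 64 and range VK_WHOLE_SIZE over a 256-byte buffer tolerates dynamic offsets
// up to 192 (64 + 192 = 256); a dynamic offset of 208 gives 64 + 208 = 272 > 256 and triggers the
// "oversteps the buffer size" error. With an explicit range of 128, the bound becomes
// 64 + 128 + dyn_offset <= 256, i.e. a maximum dynamic offset of 64.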
487
488 // For given bindings, place any updated buffers or images into the passed-in unordered_sets
489 uint32_t cvdescriptorset::DescriptorSet::GetStorageUpdates(const std::map<uint32_t, descriptor_req> &bindings,
490 std::unordered_set<VkBuffer> *buffer_set,
491 std::unordered_set<VkImageView> *image_set) const {
492 auto num_updates = 0;
493 for (auto binding_pair : bindings) {
494 auto binding = binding_pair.first;
495 // If a binding doesn't exist, skip it
496 if (!p_layout_->HasBinding(binding)) {
497 continue;
498 }
499 auto start_idx = p_layout_->GetGlobalStartIndexFromBinding(binding);
500 if (descriptors_[start_idx]->IsStorage()) {
501 if (Image == descriptors_[start_idx]->descriptor_class) {
502 for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
503 if (descriptors_[start_idx + i]->updated) {
504 image_set->insert(static_cast<ImageDescriptor *>(descriptors_[start_idx + i].get())->GetImageView());
505 num_updates++;
506 }
507 }
508 } else if (TexelBuffer == descriptors_[start_idx]->descriptor_class) {
509 for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
510 if (descriptors_[start_idx + i]->updated) {
511 auto bufferview = static_cast<TexelDescriptor *>(descriptors_[start_idx + i].get())->GetBufferView();
512 auto bv_state = getBufferViewState(device_data_, bufferview);
513 if (bv_state) {
514 buffer_set->insert(bv_state->create_info.buffer);
515 num_updates++;
516 }
517 }
518 }
519 } else if (GeneralBuffer == descriptors_[start_idx]->descriptor_class) {
520 for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
521 if (descriptors_[start_idx + i]->updated) {
522 buffer_set->insert(static_cast<BufferDescriptor *>(descriptors_[start_idx + i].get())->GetBuffer());
523 num_updates++;
524 }
525 }
526 }
527 }
528 }
529 return num_updates;
530 }
531 // Set is being deleted or updated, so invalidate all bound cmd buffers
532 void cvdescriptorset::DescriptorSet::InvalidateBoundCmdBuffers() {
533 core_validation::invalidateCommandBuffers(cb_bindings,
534 {reinterpret_cast<uint64_t &>(set_), VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT});
535 }
536 // Perform write update in given update struct
537 void cvdescriptorset::DescriptorSet::PerformWriteUpdate(const VkWriteDescriptorSet *update) {
538 auto start_idx = p_layout_->GetGlobalStartIndexFromBinding(update->dstBinding) + update->dstArrayElement;
539 // perform update
540 for (uint32_t di = 0; di < update->descriptorCount; ++di) {
541 descriptors_[start_idx + di]->WriteUpdate(update, di);
542 }
543 if (update->descriptorCount)
544 some_update_ = true;
545
546 InvalidateBoundCmdBuffers();
547 }
548 // Validate Copy update
549 bool cvdescriptorset::DescriptorSet::ValidateCopyUpdate(const debug_report_data *report_data, const VkCopyDescriptorSet *update,
550 const DescriptorSet *src_set, UNIQUE_VALIDATION_ERROR_CODE *error_code,
551 std::string *error_msg) {
552 // Verify idle ds
553 if (in_use.load()) {
554 // TODO : Re-using Allocate Idle error code, need copy update idle error code
555 *error_code = VALIDATION_ERROR_00919;
556 std::stringstream error_str;
557 error_str << "Cannot call vkUpdateDescriptorSets() to perform copy update on descriptor set " << set_
558 << " that is in use by a command buffer.";
559 *error_msg = error_str.str();
560 return false;
561 }
562 if (!p_layout_->HasBinding(update->dstBinding)) {
563 *error_code = VALIDATION_ERROR_00966;
564 std::stringstream error_str;
565 error_str << "DescriptorSet " << set_ << " does not have copy update dest binding of " << update->dstBinding << ".";
566 *error_msg = error_str.str();
567 return false;
568 }
569 if (!src_set->HasBinding(update->srcBinding)) {
570 *error_code = VALIDATION_ERROR_00964;
571 std::stringstream error_str;
572 error_str << "DescriptorSet " << set_ << " does not have copy update src binding of " << update->srcBinding << ".";
573 *error_msg = error_str.str();
574 return false;
575 }
576 // src & dst set bindings are valid
577 // Check bounds of src & dst
578 auto src_start_idx = src_set->GetGlobalStartIndexFromBinding(update->srcBinding) + update->srcArrayElement;
579 if ((src_start_idx + update->descriptorCount) > src_set->GetTotalDescriptorCount()) {
580 // SRC update out of bounds
581 *error_code = VALIDATION_ERROR_00965;
582 std::stringstream error_str;
583 error_str << "Attempting copy update from descriptorSet " << update->srcSet << " binding#" << update->srcBinding
584 << " with offset index of " << src_set->GetGlobalStartIndexFromBinding(update->srcBinding)
585 << " plus update array offset of " << update->srcArrayElement << " and update of " << update->descriptorCount
586 << " descriptors oversteps total number of descriptors in set: " << src_set->GetTotalDescriptorCount() << ".";
587 *error_msg = error_str.str();
588 return false;
589 }
590 auto dst_start_idx = p_layout_->GetGlobalStartIndexFromBinding(update->dstBinding) + update->dstArrayElement;
591 if ((dst_start_idx + update->descriptorCount) > p_layout_->GetTotalDescriptorCount()) {
592 // DST update out of bounds
593 *error_code = VALIDATION_ERROR_00967;
594 std::stringstream error_str;
595 error_str << "Attempting copy update to descriptorSet " << set_ << " binding#" << update->dstBinding
596 << " with offset index of " << p_layout_->GetGlobalStartIndexFromBinding(update->dstBinding)
597 << " plus update array offset of " << update->dstArrayElement << " and update of " << update->descriptorCount
598 << " descriptors oversteps total number of descriptors in set: " << p_layout_->GetTotalDescriptorCount() << ".";
599 *error_msg = error_str.str();
600 return false;
601 }
602 // Check that types match
603 // TODO : Base default error case going from here is VALIDATION_ERROR_00968 which covers all consistency issues, need more
604 // fine-grained error codes
605 *error_code = VALIDATION_ERROR_00968;
606 auto src_type = src_set->GetTypeFromBinding(update->srcBinding);
607 auto dst_type = p_layout_->GetTypeFromBinding(update->dstBinding);
608 if (src_type != dst_type) {
609 std::stringstream error_str;
610 error_str << "Attempting copy update to descriptorSet " << set_ << " binding #" << update->dstBinding << " with type "
611 << string_VkDescriptorType(dst_type) << " from descriptorSet " << src_set->GetSet() << " binding #"
612 << update->srcBinding << " with type " << string_VkDescriptorType(src_type) << ". Types do not match.";
613 *error_msg = error_str.str();
614 return false;
615 }
616 // Verify consistency of src & dst bindings if update crosses binding boundaries
617 if ((!src_set->GetLayout()->VerifyUpdateConsistency(update->srcBinding, update->srcArrayElement, update->descriptorCount,
618 "copy update from", src_set->GetSet(), error_msg)) ||
619 (!p_layout_->VerifyUpdateConsistency(update->dstBinding, update->dstArrayElement, update->descriptorCount, "copy update to",
620 set_, error_msg))) {
621 return false;
622 }
623 // First make sure source descriptors are updated
624 for (uint32_t i = 0; i < update->descriptorCount; ++i) {
625 if (!src_set->descriptors_[src_start_idx + i]->updated) {
626 std::stringstream error_str;
627 error_str << "Attempting copy update from descriptorSet " << src_set << " binding #" << update->srcBinding << " but descriptor at array offset "
628 << update->srcArrayElement + i << " has not been updated.";
629 *error_msg = error_str.str();
630 return false;
631 }
632 }
633 // Update parameters all look good and descriptor updated so verify update contents
634 if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, error_code, error_msg))
635 return false;
636
637 // All checks passed so update is good
638 return true;
639 }
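// Example of the destination bounds check above (hypothetical layout): if the destination set holds 4
// descriptors total and binding 1 starts at global index 3, a copy with dstArrayElement = 1 and
// descriptorCount = 2 yields dst_start_idx = 4 and 4 + 2 = 6 > 4, so the update is rejected with
// VALIDATION_ERROR_00967 before any per-descriptor content checks run.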
640 // Perform Copy update
641 void cvdescriptorset::DescriptorSet::PerformCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *src_set) {
642 auto src_start_idx = src_set->GetGlobalStartIndexFromBinding(update->srcBinding) + update->srcArrayElement;
643 auto dst_start_idx = p_layout_->GetGlobalStartIndexFromBinding(update->dstBinding) + update->dstArrayElement;
644 // Update parameters all look good so perform update
645 for (uint32_t di = 0; di < update->descriptorCount; ++di) {
646 descriptors_[dst_start_idx + di]->CopyUpdate(src_set->descriptors_[src_start_idx + di].get());
647 }
648 if (update->descriptorCount)
649 some_update_ = true;
650
651 InvalidateBoundCmdBuffers();
652 }
653
654 // Bind cb_node to this set and this set to cb_node.
655 // Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going
656 // to be used in a draw by the given cb_node
657 void cvdescriptorset::DescriptorSet::BindCommandBuffer(GLOBAL_CB_NODE *cb_node, const std::unordered_set<uint32_t> &bindings) {
658 // bind cb to this descriptor set
659 cb_bindings.insert(cb_node);
660 // Add bindings for descriptor set, the set's pool, and individual objects in the set
661 cb_node->object_bindings.insert({reinterpret_cast<uint64_t &>(set_), VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT});
662 pool_state_->cb_bindings.insert(cb_node);
663 cb_node->object_bindings.insert(
664 {reinterpret_cast<uint64_t &>(pool_state_->pool), VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT});
665 // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's
666 // resources
667 for (auto binding : bindings) {
668 auto start_idx = p_layout_->GetGlobalStartIndexFromBinding(binding);
669 auto end_idx = p_layout_->GetGlobalEndIndexFromBinding(binding);
670 for (uint32_t i = start_idx; i <= end_idx; ++i) {
671 descriptors_[i]->BindCommandBuffer(device_data_, cb_node);
672 }
673 }
674 }
675
676 cvdescriptorset::SamplerDescriptor::SamplerDescriptor() : sampler_(VK_NULL_HANDLE), immutable_(false) {
677 updated = false;
678 descriptor_class = PlainSampler;
679 };
680
681 cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const VkSampler *immut) : sampler_(VK_NULL_HANDLE), immutable_(false) {
682 updated = false;
683 descriptor_class = PlainSampler;
684 if (immut) {
685 sampler_ = *immut;
686 immutable_ = true;
687 updated = true;
688 }
689 }
690 // Validate given sampler. Currently this only checks to make sure it exists in the samplerMap
691 bool cvdescriptorset::ValidateSampler(const VkSampler sampler, const core_validation::layer_data *dev_data) {
692 return (getSamplerState(dev_data, sampler) != nullptr);
693 }
694
695 bool cvdescriptorset::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type,
696 const core_validation::layer_data *dev_data, UNIQUE_VALIDATION_ERROR_CODE *error_code,
697 std::string *error_msg) {
698 // TODO : Defaulting to 00943 for all cases here. Need to create new error codes for various cases.
699 *error_code = VALIDATION_ERROR_00943;
700 auto iv_state = getImageViewState(dev_data, image_view);
701 if (!iv_state) {
702 std::stringstream error_str;
703 error_str << "Invalid VkImageView: " << image_view;
704 *error_msg = error_str.str();
705 return false;
706 }
707 // Note that when an imageview is created, we validated that memory is bound so no need to re-check here
708 // Validate that imageLayout is compatible with aspect_mask and image format
709 // and validate that image usage bits are correct for given usage
710 VkImageAspectFlags aspect_mask = iv_state->create_info.subresourceRange.aspectMask;
711 VkImage image = iv_state->create_info.image;
712 VkFormat format = VK_FORMAT_MAX_ENUM;
713 VkImageUsageFlags usage = 0;
714 auto image_node = getImageState(dev_data, image);
715 if (image_node) {
716 format = image_node->createInfo.format;
717 usage = image_node->createInfo.usage;
718 // Validate that memory is bound to image
719 if (ValidateMemoryIsBoundToImage(dev_data, image_node, "vkUpdateDescriptorSets()")) {
720 // TODO : Need new code(s) for language in 11.6 Memory Association
721 *error_msg = "No memory bound to image.";
722 return false;
723 }
724 } else {
725 // Also need to check the swapchains.
726 auto swapchain = getSwapchainFromImage(dev_data, image);
727 if (swapchain) {
728 auto swapchain_node = getSwapchainNode(dev_data, swapchain);
729 if (swapchain_node) {
730 format = swapchain_node->createInfo.imageFormat;
731 }
732 }
733 }
734 // First validate that format and layout are compatible
735 if (format == VK_FORMAT_MAX_ENUM) {
736 std::stringstream error_str;
737 error_str << "Invalid image (" << image << ") in imageView (" << image_view << ").";
738 *error_msg = error_str.str();
739 return false;
740 }
741 // TODO : The various image aspect and format checks here are based on general spec language in 11.5 Image Views section under
742 // vkCreateImageView(). What's the best way to create unique id for these cases?
743 bool ds = vk_format_is_depth_or_stencil(format);
744 switch (image_layout) {
745 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
746 // Only Color bit must be set
747 if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
748 std::stringstream error_str;
749 error_str << "ImageView (" << image_view << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does "
750 "not have VK_IMAGE_ASPECT_COLOR_BIT set.";
751 *error_msg = error_str.str();
752 return false;
753 }
754 // format must NOT be DS
755 if (ds) {
756 std::stringstream error_str;
757 error_str << "ImageView (" << image_view
758 << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is "
759 << string_VkFormat(format) << " which is not a color format.";
760 *error_msg = error_str.str();
761 return false;
762 }
763 break;
764 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
765 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
766 // Depth or stencil bit must be set, but both must NOT be set
767 if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
768 if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
769 // both must NOT be set
770 std::stringstream error_str;
771 error_str << "ImageView (" << image_view << ") has both STENCIL and DEPTH aspects set";
772 *error_msg = error_str.str();
773 return false;
774 }
775 } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
776 // Neither were set
777 std::stringstream error_str;
778 error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
779 << " but does not have STENCIL or DEPTH aspects set";
780 *error_msg = error_str.str();
781 return false;
782 }
783 // format must be DS
784 if (!ds) {
785 std::stringstream error_str;
786 error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
787 << " but the image format is " << string_VkFormat(format) << " which is not a depth/stencil format.";
788 *error_msg = error_str.str();
789 return false;
790 }
791 break;
792 default:
793 // For other layouts if the source is depth/stencil image, both aspect bits must not be set
794 if (ds) {
795 if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
796 if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
797 // both must NOT be set
798 std::stringstream error_str;
799 error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
800 << " and is using depth/stencil image of format " << string_VkFormat(format)
801 << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil "
802 "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or "
803 "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil "
804 "reads respectively.";
805 *error_msg = error_str.str();
806 return false;
807 }
808 }
809 }
810 break;
811 }
812 // Now validate that usage flags are correctly set for given type of update
813 // As we're switching per-type, if any type has specific layout requirements, check those here as well
814 // TODO : The various image usage bit requirements are in general spec language for VkImageUsageFlags bit block in 11.3 Images
815 // under vkCreateImage()
816 // TODO : Need to also validate case VALIDATION_ERROR_00952 where STORAGE_IMAGE & INPUT_ATTACH types must have been created with
817 // identity swizzle
818 std::string error_usage_bit;
819 switch (type) {
820 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
821 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
822 if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) {
823 error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT";
824 }
825 break;
826 }
827 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
828 if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) {
829 error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT";
830 } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
831 std::stringstream error_str;
832 // TODO : Need to create custom enum error code for this case
833 error_str << "ImageView (" << image_view << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
834 << string_VkImageLayout(image_layout)
835 << " but according to spec section 13.1 Descriptor Types, 'Load and store operations on storage images can "
836 "only be done on images in VK_IMAGE_LAYOUT_GENERAL layout.'";
837 *error_msg = error_str.str();
838 return false;
839 }
840 break;
841 }
842 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
843 if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) {
844 error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
845 }
846 break;
847 }
848 default:
849 break;
850 }
851 if (!error_usage_bit.empty()) {
852 std::stringstream error_str;
853 error_str << "ImageView (" << image_view << ") with usage mask 0x" << usage
854 << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have "
855 << error_usage_bit << " set.";
856 *error_msg = error_str.str();
857 return false;
858 }
859 return true;
860 }
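// Illustrative application of the rules above: an image view over a VK_FORMAT_D24_UNORM_S8_UINT image
// whose aspectMask sets both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT is rejected for
// descriptor updates under every layout branch handled here; a view selecting only the DEPTH aspect
// passes the aspect checks, and for a SAMPLED_IMAGE or COMBINED_IMAGE_SAMPLER update the image must
// also have been created with VK_IMAGE_USAGE_SAMPLED_BIT.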
861
862 void cvdescriptorset::SamplerDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
863 sampler_ = update->pImageInfo[index].sampler;
864 updated = true;
865 }
866
867 void cvdescriptorset::SamplerDescriptor::CopyUpdate(const Descriptor *src) {
868 if (!immutable_) {
869 auto update_sampler = static_cast<const SamplerDescriptor *>(src)->sampler_;
870 sampler_ = update_sampler;
871 }
872 updated = true;
873 }
874
875 void cvdescriptorset::SamplerDescriptor::BindCommandBuffer(const core_validation::layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
876 if (!immutable_) {
877 auto sampler_state = getSamplerState(dev_data, sampler_);
878 if (sampler_state)
879 core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
880 }
881 }
882
883 cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor()
884 : sampler_(VK_NULL_HANDLE), immutable_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
885 updated = false;
886 descriptor_class = ImageSampler;
887 }
888
889 cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor(const VkSampler *immut)
890 : sampler_(VK_NULL_HANDLE), immutable_(true), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
891 updated = false;
892 descriptor_class = ImageSampler;
893 if (immut) {
894 sampler_ = *immut;
895 immutable_ = true;
896 updated = true;
897 }
898 }
899
900 void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
901 updated = true;
902 const auto &image_info = update->pImageInfo[index];
903 sampler_ = image_info.sampler;
904 image_view_ = image_info.imageView;
905 image_layout_ = image_info.imageLayout;
906 }
907
908 void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(const Descriptor *src) {
909 if (!immutable_) {
910 auto update_sampler = static_cast<const ImageSamplerDescriptor *>(src)->sampler_;
911 sampler_ = update_sampler;
912 }
913 auto image_view = static_cast<const ImageSamplerDescriptor *>(src)->image_view_;
914 auto image_layout = static_cast<const ImageSamplerDescriptor *>(src)->image_layout_;
915 updated = true;
916 image_view_ = image_view;
917 image_layout_ = image_layout;
918 }
919
920 void cvdescriptorset::ImageSamplerDescriptor::BindCommandBuffer(const core_validation::layer_data *dev_data,
921 GLOBAL_CB_NODE *cb_node) {
922 // First add binding for any non-immutable sampler
923 if (!immutable_) {
924 auto sampler_state = getSamplerState(dev_data, sampler_);
925 if (sampler_state)
926 core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
927 }
928 // Add binding for image
929 auto iv_state = getImageViewState(dev_data, image_view_);
930 if (iv_state) {
931 core_validation::AddCommandBufferBindingImageView(dev_data, cb_node, iv_state);
932 }
933 }
934
935 cvdescriptorset::ImageDescriptor::ImageDescriptor(const VkDescriptorType type)
936 : storage_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
937 updated = false;
938 descriptor_class = Image;
939 if (VK_DESCRIPTOR_TYPE_STORAGE_IMAGE == type)
940 storage_ = true;
941 };
942
943 void cvdescriptorset::ImageDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
944 updated = true;
945 const auto &image_info = update->pImageInfo[index];
946 image_view_ = image_info.imageView;
947 image_layout_ = image_info.imageLayout;
948 }
949
950 void cvdescriptorset::ImageDescriptor::CopyUpdate(const Descriptor *src) {
951 auto image_view = static_cast<const ImageDescriptor *>(src)->image_view_;
952 auto image_layout = static_cast<const ImageDescriptor *>(src)->image_layout_;
953 updated = true;
954 image_view_ = image_view;
955 image_layout_ = image_layout;
956 }
957
958 void cvdescriptorset::ImageDescriptor::BindCommandBuffer(const core_validation::layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
959 // Add binding for image
960 auto iv_state = getImageViewState(dev_data, image_view_);
961 if (iv_state) {
962 core_validation::AddCommandBufferBindingImageView(dev_data, cb_node, iv_state);
963 }
964 }
965
966 cvdescriptorset::BufferDescriptor::BufferDescriptor(const VkDescriptorType type)
967 : storage_(false), dynamic_(false), buffer_(VK_NULL_HANDLE), offset_(0), range_(0) {
968 updated = false;
969 descriptor_class = GeneralBuffer;
970 if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) {
971 dynamic_ = true;
972 } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type) {
973 storage_ = true;
974 } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) {
975 dynamic_ = true;
976 storage_ = true;
977 }
978 }
979 void cvdescriptorset::BufferDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
980 updated = true;
981 const auto &buffer_info = update->pBufferInfo[index];
982 buffer_ = buffer_info.buffer;
983 offset_ = buffer_info.offset;
984 range_ = buffer_info.range;
985 }
986
987 void cvdescriptorset::BufferDescriptor::CopyUpdate(const Descriptor *src) {
988 auto buff_desc = static_cast<const BufferDescriptor *>(src);
989 updated = true;
990 buffer_ = buff_desc->buffer_;
991 offset_ = buff_desc->offset_;
992 range_ = buff_desc->range_;
993 }
994
995 void cvdescriptorset::BufferDescriptor::BindCommandBuffer(const core_validation::layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
996 auto buffer_node = getBufferNode(dev_data, buffer_);
997 if (buffer_node)
998 core_validation::AddCommandBufferBindingBuffer(dev_data, cb_node, buffer_node);
999 }
1000
1001 cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : buffer_view_(VK_NULL_HANDLE), storage_(false) {
1002 updated = false;
1003 descriptor_class = TexelBuffer;
1004 if (VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER == type)
1005 storage_ = true;
1006 };
1007
1008 void cvdescriptorset::TexelDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
1009 updated = true;
1010 buffer_view_ = update->pTexelBufferView[index];
1011 }
1012
1013 void cvdescriptorset::TexelDescriptor::CopyUpdate(const Descriptor *src) {
1014 updated = true;
1015 buffer_view_ = static_cast<const TexelDescriptor *>(src)->buffer_view_;
1016 }
1017
1018 void cvdescriptorset::TexelDescriptor::BindCommandBuffer(const core_validation::layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
1019 auto bv_state = getBufferViewState(dev_data, buffer_view_);
1020 if (bv_state) {
1021 core_validation::AddCommandBufferBindingBufferView(dev_data, cb_node, bv_state);
1022 }
1023 }
1024
1025 // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated
1026 // sets, and then calls their respective Validate[Write|Copy]Update functions.
1027 // If the update hits an issue for which the callback returns "true", meaning that the call down the chain should
1028 // be skipped, then true is returned.
1029 // If there is no issue with the update, then false is returned.
1030 bool cvdescriptorset::ValidateUpdateDescriptorSets(const debug_report_data *report_data,
1031 const core_validation::layer_data *dev_data, uint32_t write_count,
1032 const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
1033 const VkCopyDescriptorSet *p_cds) {
1034 bool skip_call = false;
1035 // Validate Write updates
1036 for (uint32_t i = 0; i < write_count; i++) {
1037 auto dest_set = p_wds[i].dstSet;
1038 auto set_node = core_validation::getSetNode(dev_data, dest_set);
1039 if (!set_node) {
1040 skip_call |=
1041 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1042 reinterpret_cast<uint64_t &>(dest_set), __LINE__, DRAWSTATE_INVALID_DESCRIPTOR_SET, "DS",
1043 "Cannot call vkUpdateDescriptorSets() on descriptor set 0x%" PRIxLEAST64 " that has not been allocated.",
1044 reinterpret_cast<uint64_t &>(dest_set));
1045 } else {
1046 UNIQUE_VALIDATION_ERROR_CODE error_code;
1047 std::string error_str;
1048 if (!set_node->ValidateWriteUpdate(report_data, &p_wds[i], &error_code, &error_str)) {
1049 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1050 reinterpret_cast<uint64_t &>(dest_set), __LINE__, error_code, "DS",
1051 "vkUpdateDescriptorsSets() failed write update validation for Descriptor Set 0x%" PRIx64
1052 " with error: %s. %s",
1053 reinterpret_cast<uint64_t &>(dest_set), error_str.c_str(), validation_error_map[error_code]);
1054 }
1055 }
1056 }
1057 // Now validate copy updates
1058 for (uint32_t i = 0; i < copy_count; ++i) {
1059 auto dst_set = p_cds[i].dstSet;
1060 auto src_set = p_cds[i].srcSet;
1061 auto src_node = core_validation::getSetNode(dev_data, src_set);
1062 auto dst_node = core_validation::getSetNode(dev_data, dst_set);
1063 if (!src_node) {
1064 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1065 reinterpret_cast<uint64_t &>(src_set), __LINE__, VALIDATION_ERROR_00971, "DS",
1066 "Cannot call vkUpdateDescriptorSets() to copy from descriptor set 0x%" PRIxLEAST64
1067 " that has not been allocated. %s",
1068 reinterpret_cast<uint64_t &>(src_set), validation_error_map[VALIDATION_ERROR_00971]);
1069 } else if (!dst_node) {
1070 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1071 reinterpret_cast<uint64_t &>(dst_set), __LINE__, VALIDATION_ERROR_00972, "DS",
1072 "Cannot call vkUpdateDescriptorSets() to copy to descriptor set 0x%" PRIxLEAST64
1073 " that has not been allocated. %s",
1074 reinterpret_cast<uint64_t &>(dst_set), validation_error_map[VALIDATION_ERROR_00972]);
1075 } else {
1076 UNIQUE_VALIDATION_ERROR_CODE error_code;
1077 std::string error_str;
1078 if (!dst_node->ValidateCopyUpdate(report_data, &p_cds[i], src_node, &error_code, &error_str)) {
1079 skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1080 reinterpret_cast<uint64_t &>(dst_set), __LINE__, error_code, "DS",
1081 "vkUpdateDescriptorsSets() failed copy update from Descriptor Set 0x%" PRIx64
1082 " to Descriptor Set 0x%" PRIx64 " with error: %s. %s",
1083 reinterpret_cast<uint64_t &>(src_set), reinterpret_cast<uint64_t &>(dst_set),
1084 error_str.c_str(), validation_error_map[error_code]);
1085 }
1086 }
1087 }
1088 return skip_call;
1089 }
1090 // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated
1091 // sets, and then calls their respective Perform[Write|Copy]Update functions.
1092 // Prerequisite : ValidateUpdateDescriptorSets() should be called and return "false" prior to calling PerformUpdateDescriptorSets()
1093 // with the same set of updates.
1094 // This is split from the validate code to allow validation prior to calling down the chain, and then update after
1095 // calling down the chain.
1096 void cvdescriptorset::PerformUpdateDescriptorSets(const core_validation::layer_data *dev_data, uint32_t write_count,
1097 const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
1098 const VkCopyDescriptorSet *p_cds) {
1099 // Write updates first
1100 uint32_t i = 0;
1101 for (i = 0; i < write_count; ++i) {
1102 auto dest_set = p_wds[i].dstSet;
1103 auto set_node = core_validation::getSetNode(dev_data, dest_set);
1104 if (set_node) {
1105 set_node->PerformWriteUpdate(&p_wds[i]);
1106 }
1107 }
1108 // Now copy updates
1109 for (i = 0; i < copy_count; ++i) {
1110 auto dst_set = p_cds[i].dstSet;
1111 auto src_set = p_cds[i].srcSet;
1112 auto src_node = core_validation::getSetNode(dev_data, src_set);
1113 auto dst_node = core_validation::getSetNode(dev_data, dst_set);
1114 if (src_node && dst_node) {
1115 dst_node->PerformCopyUpdate(&p_cds[i], src_node);
1116 }
1117 }
1118 }
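// Usage sketch (illustrative only; GetLayerData, report_data and dispatch_table are placeholders for the
// layer's own accessors, and argument lists are abbreviated): the intended layering in a layer's
// vkUpdateDescriptorSets hook is validate first, then call down the chain, then update the shadow state.
//
//     VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t writeCount,
//                                                     const VkWriteDescriptorSet *pWrites, uint32_t copyCount,
//                                                     const VkCopyDescriptorSet *pCopies) {
//         layer_data *dev_data = GetLayerData(device);  // hypothetical accessor
//         bool skip = cvdescriptorset::ValidateUpdateDescriptorSets(dev_data->report_data, dev_data, writeCount,
//                                                                   pWrites, copyCount, pCopies);
//         if (!skip) {
//             dev_data->dispatch_table.UpdateDescriptorSets(device, writeCount, pWrites, copyCount, pCopies);
//             cvdescriptorset::PerformUpdateDescriptorSets(dev_data, writeCount, pWrites, copyCount, pCopies);
//         }
//     }
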
// Validate the state for a given write update but don't actually perform the update
// If an error would occur for this update, return false and fill in details in error_msg string
bool cvdescriptorset::DescriptorSet::ValidateWriteUpdate(const debug_report_data *report_data, const VkWriteDescriptorSet *update,
                                                         UNIQUE_VALIDATION_ERROR_CODE *error_code, std::string *error_msg) {
    // Verify idle ds
    if (in_use.load()) {
        // TODO : Re-using Allocate Idle error code, need write update idle error code
        *error_code = VALIDATION_ERROR_00919;
        std::stringstream error_str;
        error_str << "Cannot call vkUpdateDescriptorSets() to perform write update on descriptor set " << set_
                  << " that is in use by a command buffer.";
        *error_msg = error_str.str();
        return false;
    }
    // Verify dst binding exists
    if (!p_layout_->HasBinding(update->dstBinding)) {
        *error_code = VALIDATION_ERROR_00936;
        std::stringstream error_str;
        error_str << "DescriptorSet " << set_ << " does not have binding " << update->dstBinding << ".";
        *error_msg = error_str.str();
        return false;
    }
    // We know that binding is valid, now verify the update against the layout and its descriptors
    auto start_idx = p_layout_->GetGlobalStartIndexFromBinding(update->dstBinding) + update->dstArrayElement;
    auto type = p_layout_->GetTypeFromBinding(update->dstBinding);
    if (type != update->descriptorType) {
        *error_code = VALIDATION_ERROR_00937;
        std::stringstream error_str;
        error_str << "Attempting write update to descriptor set " << set_ << " binding #" << update->dstBinding << " with type "
                  << string_VkDescriptorType(type) << " but update type is " << string_VkDescriptorType(update->descriptorType);
        *error_msg = error_str.str();
        return false;
    }
    if ((start_idx + update->descriptorCount) > p_layout_->GetTotalDescriptorCount()) {
        *error_code = VALIDATION_ERROR_00938;
        std::stringstream error_str;
        error_str << "Attempting write update to descriptor set " << set_ << " binding #" << update->dstBinding << " with "
                  << p_layout_->GetTotalDescriptorCount() << " total descriptors but update of " << update->descriptorCount
                  << " descriptors starting at binding offset of " << p_layout_->GetGlobalStartIndexFromBinding(update->dstBinding)
                  << " combined with update array element offset of " << update->dstArrayElement
                  << " oversteps the size of this descriptor set.";
        *error_msg = error_str.str();
        return false;
    }
    // Verify consecutive bindings match (if needed)
    if (!p_layout_->VerifyUpdateConsistency(update->dstBinding, update->dstArrayElement, update->descriptorCount, "write update to",
                                            set_, error_msg)) {
        *error_code = VALIDATION_ERROR_00938;
        return false;
    }
    // Update is within bounds and consistent so last step is to validate update contents
    if (!VerifyWriteUpdateContents(update, start_idx, error_code, error_msg)) {
        std::stringstream error_str;
        error_str << "Write update to descriptor in set " << set_ << " binding #" << update->dstBinding
                  << " failed with error message: " << error_msg->c_str();
        *error_msg = error_str.str();
        return false;
    }
    // All checks passed, update is clean
    return true;
}
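// Worked example (illustrative): for a layout whose dstBinding starts at global index 4 and whose set holds 8
// descriptors in total, a write with dstArrayElement = 3 and descriptorCount = 6 yields
// start_idx = 4 + 3 = 7 and 7 + 6 = 13 > 8, so the bounds check above fails with VALIDATION_ERROR_00938
// (the update oversteps the set) even though the binding itself exists and the descriptor types match.
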
// For the given buffer, verify that its creation parameters are appropriate for the given type
// If there's an error, update the error_msg string with details and return false, else return true
bool cvdescriptorset::DescriptorSet::ValidateBufferUsage(BUFFER_NODE const *buffer_node, VkDescriptorType type,
                                                         UNIQUE_VALIDATION_ERROR_CODE *error_code, std::string *error_msg) const {
    // Verify that usage bits set correctly for given type
    auto usage = buffer_node->createInfo.usage;
    std::string error_usage_bit;
    switch (type) {
        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) {
                *error_code = VALIDATION_ERROR_00950;
                error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
            }
            break;
        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) {
                *error_code = VALIDATION_ERROR_00951;
                error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
            }
            break;
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
            if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) {
                *error_code = VALIDATION_ERROR_00946;
                error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
            }
            break;
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
            if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) {
                *error_code = VALIDATION_ERROR_00947;
                error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
            }
            break;
        default:
            break;
    }
    if (!error_usage_bit.empty()) {
        std::stringstream error_str;
        error_str << "Buffer (" << buffer_node->buffer << ") with usage mask 0x" << std::hex << usage << std::dec
                  << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have "
                  << error_usage_bit << " set.";
        *error_msg = error_str.str();
        return false;
    }
    return true;
}
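// Example (illustrative, application side): a buffer that will back a VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
// descriptor must be created with VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, or the check above fails with
// VALIDATION_ERROR_00946:
//
//     VkBufferCreateInfo buffer_ci = {};
//     buffer_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
//     buffer_ci.size = 256;
//     buffer_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//     buffer_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
//     vkCreateBuffer(device, &buffer_ci, nullptr, &buffer);
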
// For buffer descriptor updates, verify the buffer usage and VkDescriptorBufferInfo struct which includes:
// 1. buffer is valid
// 2. buffer was created with correct usage flags
// 3. offset is less than buffer size
// 4. range is either VK_WHOLE_SIZE or falls in (0, (buffer size - offset)]
// If there's an error, update the error_msg string with details and return false, else return true
bool cvdescriptorset::DescriptorSet::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type,
                                                          UNIQUE_VALIDATION_ERROR_CODE *error_code, std::string *error_msg) const {
    // TODO : Defaulting to 00962 for all cases here. Need to create new error codes for a few cases below.
    *error_code = VALIDATION_ERROR_00962;
    // First make sure that buffer is valid
    auto buffer_node = getBufferNode(device_data_, buffer_info->buffer);
    if (!buffer_node) {
        std::stringstream error_str;
        error_str << "Invalid VkBuffer: " << buffer_info->buffer;
        *error_msg = error_str.str();
        return false;
    }
    if (ValidateMemoryIsBoundToBuffer(device_data_, buffer_node, "vkUpdateDescriptorSets()")) {
        // TODO : This is a repeat code, need new code(s) for language in 11.6 Memory Association
        *error_msg = "No memory bound to buffer.";
        return false;
    }
    // Verify usage bits
    if (!ValidateBufferUsage(buffer_node, type, error_code, error_msg)) {
        // error_msg will have been updated by ValidateBufferUsage()
        return false;
    }
    // TODO : Need to also validate device limit offset requirements captured in VALIDATION_ERROR_00944,945
    // offset must be less than buffer size
    if (buffer_info->offset >= buffer_node->createInfo.size) {
        *error_code = VALIDATION_ERROR_00959;
        std::stringstream error_str;
        error_str << "VkDescriptorBufferInfo offset of " << buffer_info->offset << " is greater than or equal to buffer "
                  << buffer_node->buffer << " size of " << buffer_node->createInfo.size;
        *error_msg = error_str.str();
        return false;
    }
    // TODO : Need to also validate device limit range requirements captured in VALIDATION_ERROR_00948,949
    if (buffer_info->range != VK_WHOLE_SIZE) {
        // Range must be VK_WHOLE_SIZE or > 0
        if (!buffer_info->range) {
            *error_code = VALIDATION_ERROR_00960;
            std::stringstream error_str;
            error_str << "VkDescriptorBufferInfo range is not VK_WHOLE_SIZE and is zero, which is not allowed.";
            *error_msg = error_str.str();
            return false;
        }
        // Range must be VK_WHOLE_SIZE or <= (buffer size - offset)
        if (buffer_info->range > (buffer_node->createInfo.size - buffer_info->offset)) {
            *error_code = VALIDATION_ERROR_00961;
            std::stringstream error_str;
            error_str << "VkDescriptorBufferInfo range is " << buffer_info->range << " which is greater than buffer size ("
                      << buffer_node->createInfo.size << ") minus requested offset of " << buffer_info->offset;
            *error_msg = error_str.str();
            return false;
        }
    }
    return true;
}

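// Example (illustrative): for a 256-byte buffer, the following VkDescriptorBufferInfo values satisfy the checks
// above -- offset must be strictly less than 256, and range must be VK_WHOLE_SIZE or fall in (0, 256 - offset]:
//
//     VkDescriptorBufferInfo buffer_info = {};
//     buffer_info.buffer = buffer;        // a valid VkBuffer with memory bound
//     buffer_info.offset = 64;            // 64 < 256
//     buffer_info.range = VK_WHOLE_SIZE;  // or any value in (0, 192]
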
// Verify that the contents of the update are ok, but don't perform actual update
bool cvdescriptorset::DescriptorSet::VerifyWriteUpdateContents(const VkWriteDescriptorSet *update, const uint32_t index,
                                                               UNIQUE_VALIDATION_ERROR_CODE *error_code,
                                                               std::string *error_msg) const {
    switch (update->descriptorType) {
        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                // Validate image
                auto image_view = update->pImageInfo[di].imageView;
                auto image_layout = update->pImageInfo[di].imageLayout;
                if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted write update to combined image sampler descriptor failed due to: "
                              << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            // Intentional fall-through to validate sampler
        }
        case VK_DESCRIPTOR_TYPE_SAMPLER: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                if (!descriptors_[index + di]->IsImmutableSampler()) {
                    if (!ValidateSampler(update->pImageInfo[di].sampler, device_data_)) {
                        *error_code = VALIDATION_ERROR_00942;
                        std::stringstream error_str;
                        error_str << "Attempted write update to sampler descriptor with invalid sampler: "
                                  << update->pImageInfo[di].sampler << ".";
                        *error_msg = error_str.str();
                        return false;
                    }
                } else {
                    // TODO : Warn here
                }
            }
            break;
        }
        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                auto image_view = update->pImageInfo[di].imageView;
                auto image_layout = update->pImageInfo[di].imageLayout;
                if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            break;
        }
        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                auto buffer_view = update->pTexelBufferView[di];
                auto bv_state = getBufferViewState(device_data_, buffer_view);
                if (!bv_state) {
                    *error_code = VALIDATION_ERROR_00940;
                    std::stringstream error_str;
                    error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view;
                    *error_msg = error_str.str();
                    return false;
                }
                auto buffer = bv_state->create_info.buffer;
                if (!ValidateBufferUsage(getBufferNode(device_data_, buffer), update->descriptorType, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            break;
        }
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            break;
        }
        default:
            assert(0);  // We've already verified update type so should never get here
            break;
    }
    // All checks passed so update contents are good
    return true;
}
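// Example (illustrative, application side): a combined image sampler write exercises both halves of the checks
// above -- the image view/layout pair, and (unless the binding uses immutable samplers) the sampler via the
// fall-through into the SAMPLER case:
//
//     VkDescriptorImageInfo image_info = {};
//     image_info.sampler = sampler;          // checked through the SAMPLER fall-through
//     image_info.imageView = image_view;     // checked by ValidateImageUpdate()
//     image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
//
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.dstSet = set;
//     write.dstBinding = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
//     write.pImageInfo = &image_info;
//     vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);
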
// Verify that the contents of the update are ok, but don't perform actual update
bool cvdescriptorset::DescriptorSet::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set,
                                                              VkDescriptorType type, uint32_t index,
                                                              UNIQUE_VALIDATION_ERROR_CODE *error_code,
                                                              std::string *error_msg) const {
    // Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are
    // for write updates
    switch (src_set->descriptors_[index]->descriptor_class) {
        case PlainSampler: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                if (!src_set->descriptors_[index + di]->IsImmutableSampler()) {
                    auto update_sampler = static_cast<SamplerDescriptor *>(src_set->descriptors_[index + di].get())->GetSampler();
                    if (!ValidateSampler(update_sampler, device_data_)) {
                        *error_code = VALIDATION_ERROR_00942;
                        std::stringstream error_str;
                        error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
                        *error_msg = error_str.str();
                        return false;
                    }
                } else {
                    // TODO : Warn here
                }
            }
            break;
        }
        case ImageSampler: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_set->descriptors_[index + di].get());
                // First validate sampler
                if (!img_samp_desc->IsImmutableSampler()) {
                    auto update_sampler = img_samp_desc->GetSampler();
                    if (!ValidateSampler(update_sampler, device_data_)) {
                        *error_code = VALIDATION_ERROR_00942;
                        std::stringstream error_str;
                        error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
                        *error_msg = error_str.str();
                        return false;
                    }
                } else {
                    // TODO : Warn here
                }
                // Validate image
                auto image_view = img_samp_desc->GetImageView();
                auto image_layout = img_samp_desc->GetImageLayout();
                if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted copy update to combined image sampler descriptor failed due to: "
                              << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            break;
        }
        case Image: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                auto img_desc = static_cast<const ImageDescriptor *>(src_set->descriptors_[index + di].get());
                auto image_view = img_desc->GetImageView();
                auto image_layout = img_desc->GetImageLayout();
                if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            break;
        }
        case TexelBuffer: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                auto buffer_view = static_cast<TexelDescriptor *>(src_set->descriptors_[index + di].get())->GetBufferView();
                auto bv_state = getBufferViewState(device_data_, buffer_view);
                if (!bv_state) {
                    *error_code = VALIDATION_ERROR_00940;
                    std::stringstream error_str;
                    error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " << buffer_view;
                    *error_msg = error_str.str();
                    return false;
                }
                auto buffer = bv_state->create_info.buffer;
                if (!ValidateBufferUsage(getBufferNode(device_data_, buffer), type, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            break;
        }
        case GeneralBuffer: {
            for (uint32_t di = 0; di < update->descriptorCount; ++di) {
                auto buffer = static_cast<BufferDescriptor *>(src_set->descriptors_[index + di].get())->GetBuffer();
                if (!ValidateBufferUsage(getBufferNode(device_data_, buffer), type, error_code, error_msg)) {
                    std::stringstream error_str;
                    error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str();
                    *error_msg = error_str.str();
                    return false;
                }
            }
            break;
        }
        default:
            assert(0);  // We've already verified update type so should never get here
            break;
    }
    // All checks passed so update contents are good
    return true;
}
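// Note (illustrative): unlike write updates, which validate the handles supplied in the VkWriteDescriptorSet,
// the copy checks above validate the descriptors currently stored in srcSet. For example, if a source binding
// holds a texel buffer descriptor whose VkBufferView is no longer known to the layer, copying that binding
// fails here with VALIDATION_ERROR_00940 even though the VkCopyDescriptorSet structure itself is well-formed.
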
// Verify that the state at allocate time is correct, but don't actually allocate the sets yet
bool cvdescriptorset::ValidateAllocateDescriptorSets(const debug_report_data *report_data,
                                                     const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                     const core_validation::layer_data *dev_data,
                                                     AllocateDescriptorSetsData *ds_data) {
    bool skip_call = false;

    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto layout = getDescriptorSetLayout(dev_data, p_alloc_info->pSetLayouts[i]);
        if (!layout) {
            skip_call |=
                log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
                        reinterpret_cast<const uint64_t &>(p_alloc_info->pSetLayouts[i]), __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS",
                        "Unable to find set layout node for layout 0x%" PRIxLEAST64 " specified in vkAllocateDescriptorSets() call",
                        reinterpret_cast<const uint64_t &>(p_alloc_info->pSetLayouts[i]));
        } else {
            ds_data->layout_nodes[i] = layout;
            // Count total descriptors required per type
            for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
                const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
            }
        }
    }
    auto pool_state = getDescriptorPoolState(dev_data, p_alloc_info->descriptorPool);
    // Track number of descriptorSets allowable in this pool
    if (pool_state->availableSets < p_alloc_info->descriptorSetCount) {
        skip_call |= log_msg(
            report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
            reinterpret_cast<uint64_t &>(pool_state->pool), __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY, "DS",
            "Unable to allocate %u descriptorSets from pool 0x%" PRIxLEAST64 ". This pool only has %u descriptorSets remaining.",
            p_alloc_info->descriptorSetCount, reinterpret_cast<uint64_t &>(pool_state->pool), pool_state->availableSets);
    }
    // Determine whether descriptor counts are satisfiable
    for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; i++) {
        if (ds_data->required_descriptors_by_type[i] > pool_state->availableDescriptorTypeCount[i]) {
            skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
                                 reinterpret_cast<const uint64_t &>(pool_state->pool), __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY,
                                 "DS", "Unable to allocate %u descriptors of type %s from pool 0x%" PRIxLEAST64
                                 ". This pool only has %u descriptors of this type remaining.",
                                 ds_data->required_descriptors_by_type[i], string_VkDescriptorType(VkDescriptorType(i)),
                                 reinterpret_cast<uint64_t &>(pool_state->pool), pool_state->availableDescriptorTypeCount[i]);
        }
    }

    return skip_call;
}
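// Worked example (illustrative, application side): a pool created with maxSets = 2 and a single pool size of
// { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4 } can satisfy an allocation of up to two sets whose layouts require a
// combined total of at most 4 uniform-buffer descriptors. Requesting three sets, or layouts summing to 5
// uniform-buffer descriptors, trips the DRAWSTATE_DESCRIPTOR_POOL_EMPTY errors above.
//
//     VkDescriptorPoolSize pool_size = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4};
//     VkDescriptorPoolCreateInfo pool_ci = {};
//     pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
//     pool_ci.maxSets = 2;
//     pool_ci.poolSizeCount = 1;
//     pool_ci.pPoolSizes = &pool_size;
//     vkCreateDescriptorPool(device, &pool_ci, nullptr, &pool);
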
// Decrement allocated sets from the pool and insert new sets into set_map
void cvdescriptorset::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
                                                    const VkDescriptorSet *descriptor_sets,
                                                    const AllocateDescriptorSetsData *ds_data,
                                                    std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_STATE *> *pool_map,
                                                    std::unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet *> *set_map,
                                                    const core_validation::layer_data *dev_data) {
    auto pool_state = (*pool_map)[p_alloc_info->descriptorPool];
    /* Account for sets and individual descriptors allocated from pool */
    pool_state->availableSets -= p_alloc_info->descriptorSetCount;
    for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; i++) {
        pool_state->availableDescriptorTypeCount[i] -= ds_data->required_descriptors_by_type[i];
    }
    /* Create tracking object for each descriptor set; insert into
     * global map and the pool's set.
     */
    for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
        auto new_ds = new cvdescriptorset::DescriptorSet(descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i],
                                                         dev_data);

        pool_state->sets.insert(new_ds);
        new_ds->in_use.store(0);
        (*set_map)[descriptor_sets[i]] = new_ds;
    }
}

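// Worked example (illustrative): continuing the pool sketch above (maxSets = 2, 4 uniform-buffer descriptors),
// allocating one set whose layout uses 3 uniform-buffer descriptors leaves availableSets at 1 and the
// uniform-buffer entry of availableDescriptorTypeCount at 1. The new DescriptorSet tracking object is inserted
// into both the global set_map and the owning pool's 'sets' collection with its in_use flag cleared.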