1 /* Copyright (c) 2015-2016 The Khronos Group Inc.
2  * Copyright (c) 2015-2016 Valve Corporation
3  * Copyright (c) 2015-2016 LunarG, Inc.
4  * Copyright (C) 2015-2016 Google Inc.
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License");
7  * you may not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  *     http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS,
14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  *
18  * Author: Tobin Ehlis <tobine@google.com>
19  *         John Zulauf <jzulauf@lunarg.com>
20  */
21 
22 // Allow use of STL min and max functions in Windows
23 #define NOMINMAX
24 
25 #include "descriptor_sets.h"
26 #include "vk_enum_string_helper.h"
27 #include "vk_safe_struct.h"
28 #include "buffer_validation.h"
29 #include <sstream>
30 #include <algorithm>
31 
32 struct BindingNumCmp {
33     bool operator()(const VkDescriptorSetLayoutBinding *a, const VkDescriptorSetLayoutBinding *b) const {
34         return a->binding < b->binding;
35     }
36 };
37 
38 // Construct DescriptorSetLayout instance from given create info
39 // Proactively reserve and resize as possible, as the reallocation was visible in profiling
40 cvdescriptorset::DescriptorSetLayout::DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info,
41                                                           const VkDescriptorSetLayout layout)
42     : layout_(layout),
43       layout_destroyed_(false),
44       flags_(p_create_info->flags),
45       binding_count_(0),
46       descriptor_count_(0),
47       dynamic_descriptor_count_(0) {
48     binding_type_stats_ = {0, 0, 0};
49     std::set<const VkDescriptorSetLayoutBinding *, BindingNumCmp> sorted_bindings;
50     const uint32_t input_bindings_count = p_create_info->bindingCount;
51     // Sort the input bindings in binding number order, eliminating duplicates
52     for (uint32_t i = 0; i < input_bindings_count; i++) {
53         sorted_bindings.insert(p_create_info->pBindings + i);
54     }
55 
56     // Store the create info in the sorted order from above
57     std::map<uint32_t, uint32_t> binding_to_dyn_count;
58     uint32_t index = 0;
59     binding_count_ = static_cast<uint32_t>(sorted_bindings.size());
60     bindings_.reserve(binding_count_);
61     binding_to_index_map_.reserve(binding_count_);
62     for (auto input_binding : sorted_bindings) {
63         // Add to the bindings_ vector and the binding-to-index map, s.t. it is robust to invalid duplication of binding_num
64         const auto binding_num = input_binding->binding;
65         binding_to_index_map_[binding_num] = index++;
66         bindings_.emplace_back(input_binding);
67         auto &binding_info = bindings_.back();
68 
69         descriptor_count_ += binding_info.descriptorCount;
70         if (binding_info.descriptorCount > 0) {
71             non_empty_bindings_.insert(binding_num);
72         }
73 
74         if (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
75             binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
76             binding_to_dyn_count[binding_num] = binding_info.descriptorCount;
77             dynamic_descriptor_count_ += binding_info.descriptorCount;
78             binding_type_stats_.dynamic_buffer_count++;
79         } else if ((binding_info.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
80                    (binding_info.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
81             binding_type_stats_.non_dynamic_buffer_count++;
82         } else {
83             binding_type_stats_.image_sampler_count++;
84         }
85     }
86     assert(bindings_.size() == binding_count_);
87     uint32_t global_index = 0;
88     binding_to_global_index_range_map_.reserve(binding_count_);
89     // Vector order is finalized so create maps of bindings to descriptors and descriptors to indices
90     for (uint32_t i = 0; i < binding_count_; ++i) {
91         auto binding_num = bindings_[i].binding;
92         auto final_index = global_index + bindings_[i].descriptorCount;
93         binding_to_global_index_range_map_[binding_num] = IndexRange(global_index, final_index);
94         if (final_index != global_index) {
95             global_start_to_index_map_[global_index] = i;
96         }
97         global_index = final_index;
98     }
99 
100     // Now create dyn offset array mapping for any dynamic descriptors
101     uint32_t dyn_array_idx = 0;
102     binding_to_dynamic_array_idx_map_.reserve(binding_to_dyn_count.size());
103     for (const auto &bc_pair : binding_to_dyn_count) {
104         binding_to_dynamic_array_idx_map_[bc_pair.first] = dyn_array_idx;
105         dyn_array_idx += bc_pair.second;
106     }
107 }
108 
109 // Validate descriptor set layout create info
110 bool cvdescriptorset::DescriptorSetLayout::ValidateCreateInfo(const debug_report_data *report_data,
111                                                               const VkDescriptorSetLayoutCreateInfo *create_info,
112                                                               const bool push_descriptor_ext, const uint32_t max_push_descriptors) {
113     bool skip = false;
114     std::unordered_set<uint32_t> bindings;
115     uint64_t total_descriptors = 0;
116 
117     const bool push_descriptor_set = create_info->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
118     if (push_descriptor_set && !push_descriptor_ext) {
119         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
120                         DRAWSTATE_EXTENSION_NOT_ENABLED, "DS",
121                         "Attempted to use %s in %s but its required extension %s has not been enabled.\n",
122                         "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR", "VkDescriptorSetLayoutCreateInfo::flags",
123                         VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
124     }
125 
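    // Push descriptor set layouts may not contain dynamic uniform or storage buffer descriptors; the lambda below encodes that rule.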
126     auto valid_type = [push_descriptor_set](const VkDescriptorType type) {
127         return !push_descriptor_set ||
128                ((type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) && (type != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC));
129     };
130 
131     for (uint32_t i = 0; i < create_info->bindingCount; ++i) {
132         const auto &binding_info = create_info->pBindings[i];
133         if (!bindings.insert(binding_info.binding).second) {
134             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
135                             VALIDATION_ERROR_0500022e, "DS", "duplicated binding number in VkDescriptorSetLayoutBinding. %s",
136                             validation_error_map[VALIDATION_ERROR_0500022e]);
137         }
138         if (!valid_type(binding_info.descriptorType)) {
139             skip |=
140                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
141                         VALIDATION_ERROR_05000230, "DS",
142                         "invalid type %s for push descriptors in VkDescriptorSetLayoutBinding entry %" PRIu32 ". %s",
143                         string_VkDescriptorType(binding_info.descriptorType), i, validation_error_map[VALIDATION_ERROR_05000230]);
144         }
145         total_descriptors += binding_info.descriptorCount;
146     }
147 
148     if ((push_descriptor_set) && (total_descriptors > max_push_descriptors)) {
149         const char *undefined = push_descriptor_ext ? "" : " -- undefined";
150         skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
151                         VALIDATION_ERROR_05000232, "DS",
152                         "for push descriptor, total descriptor count in layout (%" PRIu64
153                         ") must not be greater than VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors (%" PRIu32
154                         "%s). %s",
155                         total_descriptors, max_push_descriptors, undefined, validation_error_map[VALIDATION_ERROR_05000232]);
156     }
157 
158     return skip;
159 }
160 
161 // Return valid index or "end" i.e. binding_count_;
162 // The asserts in "Get" are reduced to the set where no valid answer (like null or 0) could be given
163 // Common code for all binding lookups.
164 uint32_t cvdescriptorset::DescriptorSetLayout::GetIndexFromBinding(uint32_t binding) const {
165     const auto &bi_itr = binding_to_index_map_.find(binding);
166     if (bi_itr != binding_to_index_map_.cend()) return bi_itr->second;
167     return GetBindingCount();
168 }
169 VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayout::GetDescriptorSetLayoutBindingPtrFromIndex(
170     const uint32_t index) const {
171     if (index >= bindings_.size()) return nullptr;
172     return bindings_[index].ptr();
173 }
174 // Return descriptorCount for given index, 0 if index is unavailable
175 uint32_t cvdescriptorset::DescriptorSetLayout::GetDescriptorCountFromIndex(const uint32_t index) const {
176     if (index >= bindings_.size()) return 0;
177     return bindings_[index].descriptorCount;
178 }
179 // For the given index, return descriptorType
180 VkDescriptorType cvdescriptorset::DescriptorSetLayout::GetTypeFromIndex(const uint32_t index) const {
181     assert(index < bindings_.size());
182     if (index < bindings_.size()) return bindings_[index].descriptorType;
183     return VK_DESCRIPTOR_TYPE_MAX_ENUM;
184 }
185 // For the given index, return stageFlags
186 VkShaderStageFlags cvdescriptorset::DescriptorSetLayout::GetStageFlagsFromIndex(const uint32_t index) const {
187     assert(index < bindings_.size());
188     if (index < bindings_.size()) return bindings_[index].stageFlags;
189     return VkShaderStageFlags(0);
190 }
191 
192 // For the given global index, return the index of the binding that contains it
193 uint32_t cvdescriptorset::DescriptorSetLayout::GetIndexFromGlobalIndex(const uint32_t global_index) const {
194     auto start_it = global_start_to_index_map_.upper_bound(global_index);
195     uint32_t index = binding_count_;
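    // upper_bound returns the first binding whose global start index is greater than global_index;
    // stepping back one entry (when possible) yields the binding whose range contains global_index.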
196     assert(start_it != global_start_to_index_map_.cbegin());
197     if (start_it != global_start_to_index_map_.cbegin()) {
198         --start_it;
199         index = start_it->second;
200 #ifndef NDEBUG
201         const auto &range = GetGlobalIndexRangeFromBinding(bindings_[index].binding);
202         assert(range.start <= global_index && global_index < range.end);
203 #endif
204     }
205     return index;
206 }
207 
208 // For the given binding, return the global index range
209 // As start and end are often needed in pairs, get both with a single hash lookup.
210 const cvdescriptorset::IndexRange &cvdescriptorset::DescriptorSetLayout::GetGlobalIndexRangeFromBinding(
211     const uint32_t binding) const {
212     assert(binding_to_global_index_range_map_.count(binding));
213     // In error case max uint32_t so index is out of bounds to break ASAP
214     const static IndexRange kInvalidRange = {0xFFFFFFFF, 0xFFFFFFFF};
215     const auto &range_it = binding_to_global_index_range_map_.find(binding);
216     if (range_it != binding_to_global_index_range_map_.end()) {
217         return range_it->second;
218     }
219     return kInvalidRange;
220 }
221 
222 // For given binding, return ptr to ImmutableSampler array
223 VkSampler const *cvdescriptorset::DescriptorSetLayout::GetImmutableSamplerPtrFromBinding(const uint32_t binding) const {
224     const auto &bi_itr = binding_to_index_map_.find(binding);
225     if (bi_itr != binding_to_index_map_.end()) {
226         return bindings_[bi_itr->second].pImmutableSamplers;
227     }
228     return nullptr;
229 }
230 // Move to the next valid binding having a non-zero descriptor count
231 uint32_t cvdescriptorset::DescriptorSetLayout::GetNextValidBinding(const uint32_t binding) const {
232     auto it = non_empty_bindings_.upper_bound(binding);
233     assert(it != non_empty_bindings_.cend());
234     if (it != non_empty_bindings_.cend()) return *it;
235     return GetMaxBinding() + 1;
236 }
237 // For given index, return ptr to ImmutableSampler array
238 VkSampler const *cvdescriptorset::DescriptorSetLayout::GetImmutableSamplerPtrFromIndex(const uint32_t index) const {
239     if (index < bindings_.size()) {
240         return bindings_[index].pImmutableSamplers;
241     }
242     return nullptr;
243 }
244 // If our layout is compatible with rh_ds_layout, return true,
245 //  else return false and fill in error_msg with a description of what causes the incompatibility
246 bool cvdescriptorset::DescriptorSetLayout::IsCompatible(DescriptorSetLayout const *const rh_ds_layout,
247                                                         std::string *error_msg) const {
248     // Trivial case
249     if (layout_ == rh_ds_layout->GetDescriptorSetLayout()) return true;
250     if (descriptor_count_ != rh_ds_layout->descriptor_count_) {
251         std::stringstream error_str;
252         error_str << "DescriptorSetLayout " << layout_ << " has " << descriptor_count_ << " descriptors, but DescriptorSetLayout "
253                   << rh_ds_layout->GetDescriptorSetLayout() << ", which comes from pipelineLayout, has "
254                   << rh_ds_layout->descriptor_count_ << " descriptors.";
255         *error_msg = error_str.str();
256         return false;  // trivial fail case
257     }
258     // Descriptor counts match so need to go through bindings one-by-one
259     //  and verify that type and stageFlags match
260     for (auto binding : bindings_) {
261         // TODO : Do we also need to check immutable samplers?
262         // VkDescriptorSetLayoutBinding *rh_binding;
263         if (binding.descriptorCount != rh_ds_layout->GetDescriptorCountFromBinding(binding.binding)) {
264             std::stringstream error_str;
265             error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << layout_ << " has a descriptorCount of "
266                       << binding.descriptorCount << " but binding " << binding.binding << " for DescriptorSetLayout "
267                       << rh_ds_layout->GetDescriptorSetLayout() << ", which comes from pipelineLayout, has a descriptorCount of "
268                       << rh_ds_layout->GetDescriptorCountFromBinding(binding.binding);
269             *error_msg = error_str.str();
270             return false;
271         } else if (binding.descriptorType != rh_ds_layout->GetTypeFromBinding(binding.binding)) {
272             std::stringstream error_str;
273             error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << layout_ << " is type '"
274                       << string_VkDescriptorType(binding.descriptorType) << "' but binding " << binding.binding
275                       << " for DescriptorSetLayout " << rh_ds_layout->GetDescriptorSetLayout()
276                       << ", which comes from pipelineLayout, is type '"
277                       << string_VkDescriptorType(rh_ds_layout->GetTypeFromBinding(binding.binding)) << "'";
278             *error_msg = error_str.str();
279             return false;
280         } else if (binding.stageFlags != rh_ds_layout->GetStageFlagsFromBinding(binding.binding)) {
281             std::stringstream error_str;
282             error_str << "Binding " << binding.binding << " for DescriptorSetLayout " << layout_ << " has stageFlags "
283                       << binding.stageFlags << " but binding " << binding.binding << " for DescriptorSetLayout "
284                       << rh_ds_layout->GetDescriptorSetLayout() << ", which comes from pipelineLayout, has stageFlags "
285                       << rh_ds_layout->GetStageFlagsFromBinding(binding.binding);
286             *error_msg = error_str.str();
287             return false;
288         }
289     }
290     return true;
291 }
292 
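// Two consecutive bindings are "consistent" when they share descriptorType, stageFlags, and immutable sampler usage,
// which is what allows an update to roll over from one binding into the next.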
293 bool cvdescriptorset::DescriptorSetLayout::IsNextBindingConsistent(const uint32_t binding) const {
294     if (!binding_to_index_map_.count(binding + 1)) return false;
295     auto const &bi_itr = binding_to_index_map_.find(binding);
296     if (bi_itr != binding_to_index_map_.end()) {
297         const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
298         if (next_bi_itr != binding_to_index_map_.end()) {
299             auto type = bindings_[bi_itr->second].descriptorType;
300             auto stage_flags = bindings_[bi_itr->second].stageFlags;
301             auto immut_samp = bindings_[bi_itr->second].pImmutableSamplers ? true : false;
302             if ((type != bindings_[next_bi_itr->second].descriptorType) ||
303                 (stage_flags != bindings_[next_bi_itr->second].stageFlags) ||
304                 (immut_samp != (bindings_[next_bi_itr->second].pImmutableSamplers ? true : false))) {
305                 return false;
306             }
307             return true;
308         }
309     }
310     return false;
311 }
312 // Starting at offset descriptor of given binding, parse over update_count
313 //  descriptor updates and verify that for any binding boundaries that are crossed, the next binding(s) are all consistent
314 //  Consistency means that their type, stage flags, and whether or not they use immutable samplers matches
315 //  If so, return true. If not, fill in error_msg and return false
316 bool cvdescriptorset::DescriptorSetLayout::VerifyUpdateConsistency(uint32_t current_binding, uint32_t offset, uint32_t update_count,
317                                                                    const char *type, const VkDescriptorSet set,
318                                                                    std::string *error_msg) const {
319     // Verify consecutive bindings match (if needed)
320     auto orig_binding = current_binding;
321     // Track count of descriptors in the current_bindings that are remaining to be updated
322     auto binding_remaining = GetDescriptorCountFromBinding(current_binding);
323     // First, it's legal to offset beyond your own binding so handle that case
324     //  Really this is just searching for the binding in which the update begins and adjusting offset accordingly
325     while (offset >= binding_remaining) {
326         // Advance to next binding, decrement offset by binding size
327         offset -= binding_remaining;
328         binding_remaining = GetDescriptorCountFromBinding(++current_binding);
329     }
330     binding_remaining -= offset;
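    // binding_remaining now counts the descriptors left in current_binding starting at the requested offset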
331     while (update_count > binding_remaining) {  // While our updates overstep current binding
332         // Verify next consecutive binding matches type, stage flags & immutable sampler use
333         if (!IsNextBindingConsistent(current_binding++)) {
334             std::stringstream error_str;
335             error_str << "Attempting " << type << " descriptor set " << set << " binding #" << orig_binding << " with #"
336                       << update_count
337                       << " descriptors being updated but this update oversteps the bounds of this binding and the next binding is "
338                          "not consistent with current binding so this update is invalid.";
339             *error_msg = error_str.str();
340             return false;
341         }
342         // For the sake of this check, consider the binding updated and grab the count for the next binding
343         update_count -= binding_remaining;
344         binding_remaining = GetDescriptorCountFromBinding(current_binding);
345     }
346     return true;
347 }
348 
349 cvdescriptorset::AllocateDescriptorSetsData::AllocateDescriptorSetsData(uint32_t count)
350     : required_descriptors_by_type{}, layout_nodes(count, nullptr) {}
351 
352 cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, const VkDescriptorPool pool,
353                                               const std::shared_ptr<DescriptorSetLayout const> &layout, layer_data *dev_data)
354     : some_update_(false),
355       set_(set),
356       pool_state_(nullptr),
357       p_layout_(layout),
358       device_data_(dev_data),
359       limits_(GetPhysDevProperties(dev_data)->properties.limits) {
360     pool_state_ = GetDescriptorPoolState(dev_data, pool);
361     // Foreach binding, create default descriptors of given type
362     descriptors_.reserve(p_layout_->GetTotalDescriptorCount());
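    // Each binding contributes descriptorCount default-constructed descriptors of its declared type;
    // bindings with immutable samplers are treated as already updated at creation time.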
363     for (uint32_t i = 0; i < p_layout_->GetBindingCount(); ++i) {
364         auto type = p_layout_->GetTypeFromIndex(i);
365         switch (type) {
366             case VK_DESCRIPTOR_TYPE_SAMPLER: {
367                 auto immut_sampler = p_layout_->GetImmutableSamplerPtrFromIndex(i);
368                 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
369                     if (immut_sampler) {
370                         descriptors_.emplace_back(new SamplerDescriptor(immut_sampler + di));
371                         some_update_ = true;  // Immutable samplers are updated at creation
372                     } else
373                         descriptors_.emplace_back(new SamplerDescriptor(nullptr));
374                 }
375                 break;
376             }
377             case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
378                 auto immut = p_layout_->GetImmutableSamplerPtrFromIndex(i);
379                 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
380                     if (immut) {
381                         descriptors_.emplace_back(new ImageSamplerDescriptor(immut + di));
382                         some_update_ = true;  // Immutable samplers are updated at creation
383                     } else
384                         descriptors_.emplace_back(new ImageSamplerDescriptor(nullptr));
385                 }
386                 break;
387             }
388             // ImageDescriptors
389             case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
390             case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
391             case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
392                 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
393                     descriptors_.emplace_back(new ImageDescriptor(type));
394                 break;
395             case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
396             case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
397                 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
398                     descriptors_.emplace_back(new TexelDescriptor(type));
399                 break;
400             case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
401             case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
402             case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
403             case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
404                 for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
405                     descriptors_.emplace_back(new BufferDescriptor(type));
406                 break;
407             default:
408                 assert(0);  // Bad descriptor type specified
409                 break;
410         }
411     }
412 }
413 
414 cvdescriptorset::DescriptorSet::~DescriptorSet() { InvalidateBoundCmdBuffers(); }
415 
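// Decode the per-view-type requirement bits of a descriptor_req into a readable, comma-separated list of VkImageViewType names.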
416 static std::string string_descriptor_req_view_type(descriptor_req req) {
417     std::string result("");
418     for (unsigned i = 0; i <= VK_IMAGE_VIEW_TYPE_END_RANGE; i++) {
419         if (req & (1 << i)) {
420             if (result.size()) result += ", ";
421             result += string_VkImageViewType(VkImageViewType(i));
422         }
423     }
424 
425     if (!result.size()) result = "(none)";
426 
427     return result;
428 }
429 
430 // Is this set's underlying layout compatible with the passed-in layout according to "Pipeline Layout Compatibility" in the spec?
431 bool cvdescriptorset::DescriptorSet::IsCompatible(DescriptorSetLayout const *const layout, std::string *error) const {
432     return layout->IsCompatible(p_layout_.get(), error);
433 }
434 
435 // Validate that the state of this set is appropriate for the given bindings and dynamic_offsets at Draw time
436 //  This includes validating that all descriptors in the given bindings are updated,
437 //  that any update buffers are valid, and that any dynamic offsets are within the bounds of their buffers.
438 // Return true if state is acceptable, or false and write an error message into error string
439 bool cvdescriptorset::DescriptorSet::ValidateDrawState(const std::map<uint32_t, descriptor_req> &bindings,
440                                                        const std::vector<uint32_t> &dynamic_offsets, GLOBAL_CB_NODE *cb_node,
441                                                        const char *caller, std::string *error) const {
442     for (auto binding_pair : bindings) {
443         auto binding = binding_pair.first;
444         if (!p_layout_->HasBinding(binding)) {
445             std::stringstream error_str;
446             error_str << "Attempting to validate DrawState for binding #" << binding
447                       << " which is an invalid binding for this descriptor set.";
448             *error = error_str.str();
449             return false;
450         }
451         IndexRange index_range = p_layout_->GetGlobalIndexRangeFromBinding(binding);
452         auto array_idx = 0;  // Track array idx if we're dealing with array descriptors
453         for (uint32_t i = index_range.start; i < index_range.end; ++i, ++array_idx) {
454             if (!descriptors_[i]->updated) {
455                 std::stringstream error_str;
456                 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
457                           << " is being used in draw but has not been updated.";
458                 *error = error_str.str();
459                 return false;
460             } else {
461                 auto descriptor_class = descriptors_[i]->GetClass();
462                 if (descriptor_class == GeneralBuffer) {
463                     // Verify that buffers are valid
464                     auto buffer = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetBuffer();
465                     auto buffer_node = GetBufferState(device_data_, buffer);
466                     if (!buffer_node) {
467                         std::stringstream error_str;
468                         error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
469                                   << " references invalid buffer " << buffer << ".";
470                         *error = error_str.str();
471                         return false;
472                     } else if (!buffer_node->sparse) {
473                         for (auto mem_binding : buffer_node->GetBoundMemory()) {
474                             if (!GetMemObjInfo(device_data_, mem_binding)) {
475                                 std::stringstream error_str;
476                                 error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
477                                           << " uses buffer " << buffer << " that references invalid memory " << mem_binding << ".";
478                                 *error = error_str.str();
479                                 return false;
480                             }
481                         }
482                     } else {
483                         // Enqueue sparse resource validation, as these can only be validated at submit time
484                         auto device_data_copy = device_data_;  // Cannot capture members by value, so make capturable copy.
485                         std::function<bool(void)> function = [device_data_copy, caller, buffer_node]() {
486                             return core_validation::ValidateBufferMemoryIsValid(device_data_copy, buffer_node, caller);
487                         };
488                         cb_node->queue_submit_functions.push_back(function);
489                     }
490                     if (descriptors_[i]->IsDynamic()) {
491                         // Validate that dynamic offsets are within the buffer
492                         auto buffer_size = buffer_node->createInfo.size;
493                         auto range = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetRange();
494                         auto desc_offset = static_cast<BufferDescriptor *>(descriptors_[i].get())->GetOffset();
495                         auto dyn_offset = dynamic_offsets[GetDynamicOffsetIndexFromBinding(binding) + array_idx];
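                        // The dynamic offset lives at this binding's slot in the flattened dynamic-offset array plus the array element.
                        // For VK_WHOLE_SIZE ranges only the combined offsets must stay within the buffer; otherwise offset + range must also fit.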
496                         if (VK_WHOLE_SIZE == range) {
497                             if ((dyn_offset + desc_offset) > buffer_size) {
498                                 std::stringstream error_str;
499                                 error_str << "Dynamic descriptor in binding #" << binding << " at global descriptor index " << i
500                                           << " uses buffer " << buffer << " with update range of VK_WHOLE_SIZE and has dynamic offset "
501                                           << dyn_offset << " combined with offset " << desc_offset
502                                           << " that oversteps the buffer size of " << buffer_size << ".";
503                                 *error = error_str.str();
504                                 return false;
505                             }
506                         } else {
507                             if ((dyn_offset + desc_offset + range) > buffer_size) {
508                                 std::stringstream error_str;
509                                 error_str << "Dynamic descriptor in binding #" << binding << " at global descriptor index " << i
510                                           << " uses buffer " << buffer << " with dynamic offset " << dyn_offset
511                                           << " combined with offset " << desc_offset << " and range " << range
512                                           << " that oversteps the buffer size of " << buffer_size << ".";
513                                 *error = error_str.str();
514                                 return false;
515                             }
516                         }
517                     }
518                 } else if (descriptor_class == ImageSampler || descriptor_class == Image) {
519                     VkImageView image_view;
520                     VkImageLayout image_layout;
521                     if (descriptor_class == ImageSampler) {
522                         image_view = static_cast<ImageSamplerDescriptor *>(descriptors_[i].get())->GetImageView();
523                         image_layout = static_cast<ImageSamplerDescriptor *>(descriptors_[i].get())->GetImageLayout();
524                     } else {
525                         image_view = static_cast<ImageDescriptor *>(descriptors_[i].get())->GetImageView();
526                         image_layout = static_cast<ImageDescriptor *>(descriptors_[i].get())->GetImageLayout();
527                     }
528                     auto reqs = binding_pair.second;
529 
530                     auto image_view_state = GetImageViewState(device_data_, image_view);
531                     if (nullptr == image_view_state) {
532                         // Image view must have been destroyed since initial update. Could potentially flag the descriptor
533                         //  as "invalid" (updated = false) at DestroyImageView() time and detect this error at bind time
534                         std::stringstream error_str;
535                         error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
536                                   << " is using imageView " << image_view << " that has been destroyed.";
537                         *error = error_str.str();
538                         return false;
539                     }
540                     auto image_view_ci = image_view_state->create_info;
541 
542                     if ((reqs & DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS) && (~reqs & (1 << image_view_ci.viewType))) {
543                         // bad view type
544                         std::stringstream error_str;
545                         error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
546                                   << " requires an image view of type " << string_descriptor_req_view_type(reqs) << " but got "
547                                   << string_VkImageViewType(image_view_ci.viewType) << ".";
548                         *error = error_str.str();
549                         return false;
550                     }
551 
552                     auto image_node = GetImageState(device_data_, image_view_ci.image);
553                     assert(image_node);
554                     // Verify Image Layout
555                     // Copy first mip level into sub_layers and loop over each mip level to verify layout
556                     VkImageSubresourceLayers sub_layers;
557                     sub_layers.aspectMask = image_view_ci.subresourceRange.aspectMask;
558                     sub_layers.baseArrayLayer = image_view_ci.subresourceRange.baseArrayLayer;
559                     sub_layers.layerCount = image_view_ci.subresourceRange.layerCount;
560                     bool hit_error = false;
561                     for (auto cur_level = image_view_ci.subresourceRange.baseMipLevel;
562                          cur_level < image_view_ci.subresourceRange.levelCount; ++cur_level) {
563                         sub_layers.mipLevel = cur_level;
564                         VerifyImageLayout(device_data_, cb_node, image_node, sub_layers, image_layout, VK_IMAGE_LAYOUT_UNDEFINED,
565                                           caller, VALIDATION_ERROR_046002b0, &hit_error);
566                         if (hit_error) {
567                             *error =
568                                 "Image layout specified at vkUpdateDescriptorSets() time doesn't match actual image layout at time "
569                                 "descriptor is used. See previous error callback for specific details.";
570                             return false;
571                         }
572                     }
573                     // Verify Sample counts
574                     if ((reqs & DESCRIPTOR_REQ_SINGLE_SAMPLE) && image_node->createInfo.samples != VK_SAMPLE_COUNT_1_BIT) {
575                         std::stringstream error_str;
576                         error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
577                                   << " requires bound image to have VK_SAMPLE_COUNT_1_BIT but got "
578                                   << string_VkSampleCountFlagBits(image_node->createInfo.samples) << ".";
579                         *error = error_str.str();
580                         return false;
581                     }
582                     if ((reqs & DESCRIPTOR_REQ_MULTI_SAMPLE) && image_node->createInfo.samples == VK_SAMPLE_COUNT_1_BIT) {
583                         std::stringstream error_str;
584                         error_str << "Descriptor in binding #" << binding << " at global descriptor index " << i
585                                   << " requires bound image to have multiple samples, but got VK_SAMPLE_COUNT_1_BIT.";
586                         *error = error_str.str();
587                         return false;
588                     }
589                 }
590             }
591         }
592     }
593     return true;
594 }
595 
596 // For given bindings, place any update buffers or images into the passed-in unordered_sets
597 uint32_t cvdescriptorset::DescriptorSet::GetStorageUpdates(const std::map<uint32_t, descriptor_req> &bindings,
598                                                            std::unordered_set<VkBuffer> *buffer_set,
599                                                            std::unordered_set<VkImageView> *image_set) const {
600     auto num_updates = 0;
601     for (auto binding_pair : bindings) {
602         auto binding = binding_pair.first;
603         // If a binding doesn't exist, skip it
604         if (!p_layout_->HasBinding(binding)) {
605             continue;
606         }
607         uint32_t start_idx = p_layout_->GetGlobalIndexRangeFromBinding(binding).start;
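        // All descriptors in a binding share one type, so the first descriptor's class and storage-ness decide how the whole binding is handled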
608         if (descriptors_[start_idx]->IsStorage()) {
609             if (Image == descriptors_[start_idx]->descriptor_class) {
610                 for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
611                     if (descriptors_[start_idx + i]->updated) {
612                         image_set->insert(static_cast<ImageDescriptor *>(descriptors_[start_idx + i].get())->GetImageView());
613                         num_updates++;
614                     }
615                 }
616             } else if (TexelBuffer == descriptors_[start_idx]->descriptor_class) {
617                 for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
618                     if (descriptors_[start_idx + i]->updated) {
619                         auto bufferview = static_cast<TexelDescriptor *>(descriptors_[start_idx + i].get())->GetBufferView();
620                         auto bv_state = GetBufferViewState(device_data_, bufferview);
621                         if (bv_state) {
622                             buffer_set->insert(bv_state->create_info.buffer);
623                             num_updates++;
624                         }
625                     }
626                 }
627             } else if (GeneralBuffer == descriptors_[start_idx]->descriptor_class) {
628                 for (uint32_t i = 0; i < p_layout_->GetDescriptorCountFromBinding(binding); ++i) {
629                     if (descriptors_[start_idx + i]->updated) {
630                         buffer_set->insert(static_cast<BufferDescriptor *>(descriptors_[start_idx + i].get())->GetBuffer());
631                         num_updates++;
632                     }
633                 }
634             }
635         }
636     }
637     return num_updates;
638 }
639 // Set is being deleted or updated so invalidate all bound cmd buffers
640 void cvdescriptorset::DescriptorSet::InvalidateBoundCmdBuffers() {
641     core_validation::invalidateCommandBuffers(device_data_, cb_bindings, {HandleToUint64(set_), kVulkanObjectTypeDescriptorSet});
642 }
643 // Perform write update in given update struct
644 void cvdescriptorset::DescriptorSet::PerformWriteUpdate(const VkWriteDescriptorSet *update) {
645     // Perform update on a per-binding basis as consecutive updates roll over to next binding
646     auto descriptors_remaining = update->descriptorCount;
647     auto binding_being_updated = update->dstBinding;
648     auto offset = update->dstArrayElement;
649     uint32_t update_index = 0;
650     while (descriptors_remaining) {
651         uint32_t update_count = std::min(descriptors_remaining, GetDescriptorCountFromBinding(binding_being_updated));
652         auto global_idx = p_layout_->GetGlobalIndexRangeFromBinding(binding_being_updated).start + offset;
653         // Loop over the updates for a single binding at a time
654         for (uint32_t di = 0; di < update_count; ++di, ++update_index) {
655             descriptors_[global_idx + di]->WriteUpdate(update, update_index);
656         }
657         // Roll over to next binding in case of consecutive update
658         descriptors_remaining -= update_count;
659         offset = 0;
660         binding_being_updated++;
661     }
662     if (update->descriptorCount) some_update_ = true;
663 
664     InvalidateBoundCmdBuffers();
665 }
666 // Validate Copy update
667 bool cvdescriptorset::DescriptorSet::ValidateCopyUpdate(const debug_report_data *report_data, const VkCopyDescriptorSet *update,
668                                                         const DescriptorSet *src_set, UNIQUE_VALIDATION_ERROR_CODE *error_code,
669                                                         std::string *error_msg) {
670     // Verify dst layout still valid
671     if (p_layout_->IsDestroyed()) {
672         *error_code = VALIDATION_ERROR_03207601;
673         string_sprintf(error_msg,
674                        "Cannot call vkUpdateDescriptorSets() to perform copy update on descriptor set dstSet 0x%" PRIxLEAST64
675                        " created with destroyed VkDescriptorSetLayout 0x%" PRIxLEAST64,
676                        HandleToUint64(set_), HandleToUint64(p_layout_->GetDescriptorSetLayout()));
677         return false;
678     }
679 
680     // Verify src layout still valid
681     if (src_set->p_layout_->IsDestroyed()) {
682         *error_code = VALIDATION_ERROR_0322d201;
683         string_sprintf(
684             error_msg,
685             "Cannot call vkUpdateDescriptorSets() to perform copy update of dstSet 0x%" PRIxLEAST64
686             " from descriptor set srcSet 0x%" PRIxLEAST64 " created with destroyed VkDescriptorSetLayout 0x%" PRIxLEAST64,
687             HandleToUint64(set_), HandleToUint64(src_set->set_), HandleToUint64(src_set->p_layout_->GetDescriptorSetLayout()));
688         return false;
689     }
690 
691     // Verify idle ds
692     if (in_use.load()) {
693         // TODO : Re-using Free Idle error code, need copy update idle error code
694         *error_code = VALIDATION_ERROR_2860026a;
695         std::stringstream error_str;
696         error_str << "Cannot call vkUpdateDescriptorSets() to perform copy update on descriptor set " << set_
697                   << " that is in use by a command buffer";
698         *error_msg = error_str.str();
699         return false;
700     }
701     if (!p_layout_->HasBinding(update->dstBinding)) {
702         *error_code = VALIDATION_ERROR_032002b6;
703         std::stringstream error_str;
704         error_str << "DescriptorSet " << set_ << " does not have copy update dest binding of " << update->dstBinding;
705         *error_msg = error_str.str();
706         return false;
707     }
708     if (!src_set->HasBinding(update->srcBinding)) {
709         *error_code = VALIDATION_ERROR_032002b2;
710         std::stringstream error_str;
711         error_str << "DescriptorSet " << set_ << " does not have copy update src binding of " << update->srcBinding;
712         *error_msg = error_str.str();
713         return false;
714     }
715     // src & dst set bindings are valid
716     // Check bounds of src & dst
717     auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement;
718     if ((src_start_idx + update->descriptorCount) > src_set->GetTotalDescriptorCount()) {
719         // SRC update out of bounds
720         *error_code = VALIDATION_ERROR_032002b4;
721         std::stringstream error_str;
722         error_str << "Attempting copy update from descriptorSet " << update->srcSet << " binding#" << update->srcBinding
723                   << " with offset index of " << src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start
724                   << " plus update array offset of " << update->srcArrayElement << " and update of " << update->descriptorCount
725                   << " descriptors oversteps total number of descriptors in set: " << src_set->GetTotalDescriptorCount();
726         *error_msg = error_str.str();
727         return false;
728     }
729     auto dst_start_idx = p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
730     if ((dst_start_idx + update->descriptorCount) > p_layout_->GetTotalDescriptorCount()) {
731         // DST update out of bounds
732         *error_code = VALIDATION_ERROR_032002b8;
733         std::stringstream error_str;
734         error_str << "Attempting copy update to descriptorSet " << set_ << " binding#" << update->dstBinding
735                   << " with offset index of " << p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start
736                   << " plus update array offset of " << update->dstArrayElement << " and update of " << update->descriptorCount
737                   << " descriptors oversteps total number of descriptors in set: " << p_layout_->GetTotalDescriptorCount();
738         *error_msg = error_str.str();
739         return false;
740     }
741     // Check that types match
742     // TODO : Base default error case going from here is VALIDATION_ERROR_0002b8012ba which covers all consistency issues, need more
743     // fine-grained error codes
744     *error_code = VALIDATION_ERROR_032002ba;
745     auto src_type = src_set->GetTypeFromBinding(update->srcBinding);
746     auto dst_type = p_layout_->GetTypeFromBinding(update->dstBinding);
747     if (src_type != dst_type) {
748         std::stringstream error_str;
749         error_str << "Attempting copy update to descriptorSet " << set_ << " binding #" << update->dstBinding << " with type "
750                   << string_VkDescriptorType(dst_type) << " from descriptorSet " << src_set->GetSet() << " binding #"
751                   << update->srcBinding << " with type " << string_VkDescriptorType(src_type) << ". Types do not match";
752         *error_msg = error_str.str();
753         return false;
754     }
755     // Verify consistency of src & dst bindings if update crosses binding boundaries
756     if ((!src_set->GetLayout()->VerifyUpdateConsistency(update->srcBinding, update->srcArrayElement, update->descriptorCount,
757                                                         "copy update from", src_set->GetSet(), error_msg)) ||
758         (!p_layout_->VerifyUpdateConsistency(update->dstBinding, update->dstArrayElement, update->descriptorCount, "copy update to",
759                                              set_, error_msg))) {
760         return false;
761     }
762     // Update parameters all look good and descriptor updated so verify update contents
763     if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, error_code, error_msg)) return false;
764 
765     // All checks passed so update is good
766     return true;
767 }
768 // Perform Copy update
769 void cvdescriptorset::DescriptorSet::PerformCopyUpdate(const VkCopyDescriptorSet *update, const DescriptorSet *src_set) {
770     auto src_start_idx = src_set->GetGlobalIndexRangeFromBinding(update->srcBinding).start + update->srcArrayElement;
771     auto dst_start_idx = p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
772     // Update parameters all look good so perform update
773     for (uint32_t di = 0; di < update->descriptorCount; ++di) {
774         auto src = src_set->descriptors_[src_start_idx + di].get();
775         auto dst = descriptors_[dst_start_idx + di].get();
776         if (src->updated) {
777             dst->CopyUpdate(src);
778             some_update_ = true;
779         } else {
780             dst->updated = false;
781         }
782     }
783 
784     InvalidateBoundCmdBuffers();
785 }
786 
787 // Bind cb_node to this set and this set to cb_node.
788 // Prereq: This should be called for a set that has been confirmed to be active for the given cb_node, meaning it's going
789 //   to be used in a draw by the given cb_node
790 void cvdescriptorset::DescriptorSet::BindCommandBuffer(GLOBAL_CB_NODE *cb_node,
791                                                        const std::map<uint32_t, descriptor_req> &binding_req_map) {
792     // bind cb to this descriptor set
793     cb_bindings.insert(cb_node);
794     // Add bindings for descriptor set, the set's pool, and individual objects in the set
795     cb_node->object_bindings.insert({HandleToUint64(set_), kVulkanObjectTypeDescriptorSet});
796     pool_state_->cb_bindings.insert(cb_node);
797     cb_node->object_bindings.insert({HandleToUint64(pool_state_->pool), kVulkanObjectTypeDescriptorPool});
798     // For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's
799     // resources
800     for (auto binding_req_pair : binding_req_map) {
801         auto binding = binding_req_pair.first;
802         auto range = p_layout_->GetGlobalIndexRangeFromBinding(binding);
803         for (uint32_t i = range.start; i < range.end; ++i) {
804             descriptors_[i]->BindCommandBuffer(device_data_, cb_node);
805         }
806     }
807 }
808 void cvdescriptorset::DescriptorSet::FilterAndTrackOneBindingReq(const BindingReqMap::value_type &binding_req_pair,
809                                                                  const BindingReqMap &in_req, BindingReqMap *out_req,
810                                                                  TrackedBindings *bindings) {
811     assert(out_req);
812     assert(bindings);
813     const auto binding = binding_req_pair.first;
814     // Use insert and look at the boolean ("was inserted") in the returned pair to see if this is a new set member.
815     // Saves one hash lookup vs. find ... compare w/ end ... insert.
816     const auto it_bool_pair = bindings->insert(binding);
817     if (it_bool_pair.second) {
818         out_req->emplace(binding_req_pair);
819     }
820 }
821 void cvdescriptorset::DescriptorSet::FilterAndTrackOneBindingReq(const BindingReqMap::value_type &binding_req_pair,
822                                                                  const BindingReqMap &in_req, BindingReqMap *out_req,
823                                                                  TrackedBindings *bindings, uint32_t limit) {
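    // The limit is the number of bindings of this descriptor class in the layout; once they have all been tracked there is nothing left to forward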
824     if (bindings->size() < limit) FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, bindings);
825 }
826 
827 void cvdescriptorset::DescriptorSet::FilterAndTrackBindingReqs(GLOBAL_CB_NODE *cb_state, const BindingReqMap &in_req,
828                                                                BindingReqMap *out_req) {
829     TrackedBindings &bound = cached_validation_[cb_state].command_binding_and_usage;
830     if (bound.size() == GetBindingCount()) {
831         return;  // All bindings are bound, out req is empty
832     }
833     for (const auto &binding_req_pair : in_req) {
834         const auto binding = binding_req_pair.first;
835         // If a binding doesn't exist, or has already been bound, skip it
836         if (p_layout_->HasBinding(binding)) {
837             FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, &bound);
838         }
839     }
840 }
841 
842 void cvdescriptorset::DescriptorSet::FilterAndTrackBindingReqs(GLOBAL_CB_NODE *cb_state, PIPELINE_STATE *pipeline,
843                                                                const BindingReqMap &in_req, BindingReqMap *out_req) {
844     auto &validated = cached_validation_[cb_state];
845     auto &image_sample_val = validated.image_samplers[pipeline];
846     auto *const dynamic_buffers = &validated.dynamic_buffers;
847     auto *const non_dynamic_buffers = &validated.non_dynamic_buffers;
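    // Buffer bindings are cached once per command buffer, while image/sampler bindings are cached per pipeline
    // and re-validated whenever the command buffer records an image layout change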
848     const auto &stats = p_layout_->GetBindingTypeStats();
849     for (const auto &binding_req_pair : in_req) {
850         auto binding = binding_req_pair.first;
851         VkDescriptorSetLayoutBinding const *layout_binding = p_layout_->GetDescriptorSetLayoutBindingPtrFromBinding(binding);
852         if (!layout_binding) {
853             continue;
854         }
855         // Caching criteria differs per type.
856         // If image layouts have changed, the image descriptors need to be validated against them.
857         if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
858             (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
859             FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, dynamic_buffers, stats.dynamic_buffer_count);
860         } else if ((layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
861                    (layout_binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)) {
862             FilterAndTrackOneBindingReq(binding_req_pair, in_req, out_req, non_dynamic_buffers, stats.non_dynamic_buffer_count);
863         } else {
864             // This is rather crude, as the changed layouts may not impact the bound descriptors,
865             // but the simple "versioning" acts as a cheap dirty-flag test.
866             auto &version = image_sample_val[binding];  // Take advantage of the default constructor zero-initializing new entries
867             if (version != cb_state->image_layout_change_count) {
868                 version = cb_state->image_layout_change_count;
869                 out_req->emplace(binding_req_pair);
870             }
871         }
872     }
873 }
874 
875 cvdescriptorset::SamplerDescriptor::SamplerDescriptor(const VkSampler *immut) : sampler_(VK_NULL_HANDLE), immutable_(false) {
876     updated = false;
877     descriptor_class = PlainSampler;
878     if (immut) {
879         sampler_ = *immut;
880         immutable_ = true;
881         updated = true;
882     }
883 }
884 // Validate given sampler. Currently this only checks to make sure it exists in the samplerMap
885 bool cvdescriptorset::ValidateSampler(const VkSampler sampler, const layer_data *dev_data) {
886     return (GetSamplerState(dev_data, sampler) != nullptr);
887 }
888 
889 bool cvdescriptorset::ValidateImageUpdate(VkImageView image_view, VkImageLayout image_layout, VkDescriptorType type,
890                                           const layer_data *dev_data, UNIQUE_VALIDATION_ERROR_CODE *error_code,
891                                           std::string *error_msg) {
892     // TODO : Defaulting to 00943 for all cases here. Need to create new error codes for various cases.
893     *error_code = VALIDATION_ERROR_15c0028c;
894     auto iv_state = GetImageViewState(dev_data, image_view);
895     if (!iv_state) {
896         std::stringstream error_str;
897         error_str << "Invalid VkImageView: " << image_view;
898         *error_msg = error_str.str();
899         return false;
900     }
901     // Note that when an imageview is created, we validated that memory is bound so no need to re-check here
902     // Validate that imageLayout is compatible with aspect_mask and image format
903     //  and validate that image usage bits are correct for given usage
904     VkImageAspectFlags aspect_mask = iv_state->create_info.subresourceRange.aspectMask;
905     VkImage image = iv_state->create_info.image;
906     VkFormat format = VK_FORMAT_MAX_ENUM;
907     VkImageUsageFlags usage = 0;
908     auto image_node = GetImageState(dev_data, image);
909     if (image_node) {
910         format = image_node->createInfo.format;
911         usage = image_node->createInfo.usage;
912         // Validate that memory is bound to image
913         // TODO: This should have its own valid usage id apart from 2524, which is from the CreateImageView case. The only way
914         //  the error here occurs is if memory bound to a created imageView has been freed.
915         if (ValidateMemoryIsBoundToImage(dev_data, image_node, "vkUpdateDescriptorSets()", VALIDATION_ERROR_0ac007f8)) {
916             *error_code = VALIDATION_ERROR_0ac007f8;
917             *error_msg = "No memory bound to image.";
918             return false;
919         }
920 
921         // KHR_maintenance1 allows rendering into 2D or 2DArray views which slice a 3D image,
922         // but not binding them to descriptor sets.
923         if (image_node->createInfo.imageType == VK_IMAGE_TYPE_3D &&
924             (iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D ||
925              iv_state->create_info.viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
926             *error_code = VALIDATION_ERROR_046002ae;
927             *error_msg = "ImageView must not be a 2D or 2DArray view of a 3D image";
928             return false;
929         }
930     }
931     // First validate that format and layout are compatible
932     if (format == VK_FORMAT_MAX_ENUM) {
933         std::stringstream error_str;
934         error_str << "Invalid image (" << image << ") in imageView (" << image_view << ").";
935         *error_msg = error_str.str();
936         return false;
937     }
938     // TODO : The various image aspect and format checks here are based on general spec language in 11.5 Image Views section under
939     // vkCreateImageView(). What's the best way to create unique id for these cases?
940     bool ds = FormatIsDepthOrStencil(format);
941     switch (image_layout) {
942         case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
943             // Only Color bit must be set
944             if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
945                 std::stringstream error_str;
946                 error_str
947                     << "ImageView (" << image_view
948                     << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does not have VK_IMAGE_ASPECT_COLOR_BIT set.";
949                 *error_msg = error_str.str();
950                 return false;
951             }
952             // format must NOT be DS
953             if (ds) {
954                 std::stringstream error_str;
955                 error_str << "ImageView (" << image_view
956                           << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is "
957                           << string_VkFormat(format) << " which is not a color format.";
958                 *error_msg = error_str.str();
959                 return false;
960             }
961             break;
962         case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
963         case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
964             // Depth or stencil bit must be set, but both must NOT be set
965             if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
966                 if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
967                     // both  must NOT be set
968                     std::stringstream error_str;
969                     error_str << "ImageView (" << image_view << ") has both STENCIL and DEPTH aspects set";
970                     *error_msg = error_str.str();
971                     return false;
972                 }
973             } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
974                 // Neither were set
975                 std::stringstream error_str;
976                 error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
977                           << " but does not have STENCIL or DEPTH aspects set";
978                 *error_msg = error_str.str();
979                 return false;
980             }
981             // format must be DS
982             if (!ds) {
983                 std::stringstream error_str;
984                 error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
985                           << " but the image format is " << string_VkFormat(format) << " which is not a depth/stencil format.";
986                 *error_msg = error_str.str();
987                 return false;
988             }
989             break;
990         default:
991             // For other layouts if the source is depth/stencil image, both aspect bits must not be set
992             if (ds) {
993                 if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
994                     if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
995                         // both  must NOT be set
996                         std::stringstream error_str;
997                         error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
998                                   << " and is using depth/stencil image of format " << string_VkFormat(format)
999                                   << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil "
1000                                      "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or "
1001                                      "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil "
1002                                      "reads respectively.";
1003                         *error_msg = error_str.str();
1004                         return false;
1005                     }
1006                 }
1007             }
1008             break;
1009     }
1010     // Now validate that usage flags are correctly set for given type of update
1011     //  As we're switching per-type, if any type has specific layout requirements, check those here as well
1012     // TODO : The various image usage bit requirements are in general spec language for VkImageUsageFlags bit block in 11.3 Images
1013     // under vkCreateImage()
1014     // TODO : Need to also validate case VALIDATION_ERROR_15c002a0 where STORAGE_IMAGE & INPUT_ATTACH types must have been created
1015     // with identity swizzle
1016     std::string error_usage_bit;
1017     switch (type) {
1018         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1019         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
1020             if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) {
1021                 error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT";
1022             }
1023             break;
1024         }
1025         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
1026             if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) {
1027                 error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT";
1028             } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
1029                 std::stringstream error_str;
1030                 // TODO : Need to create custom enum error codes for these cases
1031                 if (image_node->shared_presentable) {
1032                     if (VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR != image_layout) {
1033                         error_str << "ImageView (" << image_view
1034                                   << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type with a front-buffered image is being updated with "
1035                                      "layout "
1036                                   << string_VkImageLayout(image_layout)
1037                                   << " but according to spec section 13.1 Descriptor Types, 'Front-buffered images that report "
1038                                      "support for VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT must be in the "
1039                                      "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR layout.'";
1040                         *error_msg = error_str.str();
1041                         return false;
1042                     }
1043                 } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
1044                     error_str << "ImageView (" << image_view
1045                               << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
1046                               << string_VkImageLayout(image_layout)
1047                               << " but according to spec section 13.1 Descriptor Types, 'Load and store operations on storage "
1048                                  "images can only be done on images in VK_IMAGE_LAYOUT_GENERAL layout.'";
1049                     *error_msg = error_str.str();
1050                     return false;
1051                 }
1052             }
1053             break;
1054         }
1055         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
1056             if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) {
1057                 error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
1058             }
1059             break;
1060         }
1061         default:
1062             break;
1063     }
1064     if (!error_usage_bit.empty()) {
1065         std::stringstream error_str;
1066         error_str << "ImageView (" << image_view << ") with usage mask 0x" << std::hex << usage << std::dec
1067                   << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have "
1068                   << error_usage_bit << " set.";
1069         *error_msg = error_str.str();
1070         return false;
1071     }
1072     return true;
1073 }
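// Illustrative sketch (hypothetical handles/values): an image descriptor update that satisfies the
// layout/aspect/usage rules above for a depth image pairs a depth-only view with a depth read layout:
//
//     VkDescriptorImageInfo image_info = {};
//     image_info.sampler = my_sampler;                                             // hypothetical handle
//     image_info.imageView = depth_only_view;   // view created with aspectMask == VK_IMAGE_ASPECT_DEPTH_BIT
//     image_info.imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
//
// A view with both DEPTH and STENCIL aspects, or a color-format image in that layout, would be rejected by
// ValidateImageUpdate() above.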
1074 
1075 void cvdescriptorset::SamplerDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
1076     sampler_ = update->pImageInfo[index].sampler;
1077     updated = true;
1078 }
1079 
1080 void cvdescriptorset::SamplerDescriptor::CopyUpdate(const Descriptor *src) {
1081     if (!immutable_) {
1082         auto update_sampler = static_cast<const SamplerDescriptor *>(src)->sampler_;
1083         sampler_ = update_sampler;
1084     }
1085     updated = true;
1086 }
1087 
1088 void cvdescriptorset::SamplerDescriptor::BindCommandBuffer(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
1089     if (!immutable_) {
1090         auto sampler_state = GetSamplerState(dev_data, sampler_);
1091         if (sampler_state) core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
1092     }
1093 }
1094 
1095 cvdescriptorset::ImageSamplerDescriptor::ImageSamplerDescriptor(const VkSampler *immut)
1096     : sampler_(VK_NULL_HANDLE), immutable_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
1097     updated = false;
1098     descriptor_class = ImageSampler;
1099     if (immut) {
1100         sampler_ = *immut;
1101         immutable_ = true;
1102     }
1103 }
1104 
1105 void cvdescriptorset::ImageSamplerDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
1106     updated = true;
1107     const auto &image_info = update->pImageInfo[index];
1108     sampler_ = image_info.sampler;
1109     image_view_ = image_info.imageView;
1110     image_layout_ = image_info.imageLayout;
1111 }
1112 
1113 void cvdescriptorset::ImageSamplerDescriptor::CopyUpdate(const Descriptor *src) {
1114     if (!immutable_) {
1115         auto update_sampler = static_cast<const ImageSamplerDescriptor *>(src)->sampler_;
1116         sampler_ = update_sampler;
1117     }
1118     auto image_view = static_cast<const ImageSamplerDescriptor *>(src)->image_view_;
1119     auto image_layout = static_cast<const ImageSamplerDescriptor *>(src)->image_layout_;
1120     updated = true;
1121     image_view_ = image_view;
1122     image_layout_ = image_layout;
1123 }
1124 
1125 void cvdescriptorset::ImageSamplerDescriptor::BindCommandBuffer(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
1126     // First add binding for any non-immutable sampler
1127     if (!immutable_) {
1128         auto sampler_state = GetSamplerState(dev_data, sampler_);
1129         if (sampler_state) core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
1130     }
1131     // Add binding for image
1132     auto iv_state = GetImageViewState(dev_data, image_view_);
1133     if (iv_state) {
1134         core_validation::AddCommandBufferBindingImageView(dev_data, cb_node, iv_state);
1135     }
1136 }
1137 
1138 cvdescriptorset::ImageDescriptor::ImageDescriptor(const VkDescriptorType type)
1139     : storage_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
1140     updated = false;
1141     descriptor_class = Image;
1142     if (VK_DESCRIPTOR_TYPE_STORAGE_IMAGE == type) storage_ = true;
1143 }
1144 
1145 void cvdescriptorset::ImageDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
1146     updated = true;
1147     const auto &image_info = update->pImageInfo[index];
1148     image_view_ = image_info.imageView;
1149     image_layout_ = image_info.imageLayout;
1150 }
1151 
1152 void cvdescriptorset::ImageDescriptor::CopyUpdate(const Descriptor *src) {
1153     auto image_view = static_cast<const ImageDescriptor *>(src)->image_view_;
1154     auto image_layout = static_cast<const ImageDescriptor *>(src)->image_layout_;
1155     updated = true;
1156     image_view_ = image_view;
1157     image_layout_ = image_layout;
1158 }
1159 
1160 void cvdescriptorset::ImageDescriptor::BindCommandBuffer(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
1161     // Add binding for image
1162     auto iv_state = GetImageViewState(dev_data, image_view_);
1163     if (iv_state) {
1164         core_validation::AddCommandBufferBindingImageView(dev_data, cb_node, iv_state);
1165     }
1166 }
1167 
1168 cvdescriptorset::BufferDescriptor::BufferDescriptor(const VkDescriptorType type)
1169     : storage_(false), dynamic_(false), buffer_(VK_NULL_HANDLE), offset_(0), range_(0) {
1170     updated = false;
1171     descriptor_class = GeneralBuffer;
1172     if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) {
1173         dynamic_ = true;
1174     } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type) {
1175         storage_ = true;
1176     } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) {
1177         dynamic_ = true;
1178         storage_ = true;
1179     }
1180 }
1181 void cvdescriptorset::BufferDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
1182     updated = true;
1183     const auto &buffer_info = update->pBufferInfo[index];
1184     buffer_ = buffer_info.buffer;
1185     offset_ = buffer_info.offset;
1186     range_ = buffer_info.range;
1187 }
1188 
1189 void cvdescriptorset::BufferDescriptor::CopyUpdate(const Descriptor *src) {
1190     auto buff_desc = static_cast<const BufferDescriptor *>(src);
1191     updated = true;
1192     buffer_ = buff_desc->buffer_;
1193     offset_ = buff_desc->offset_;
1194     range_ = buff_desc->range_;
1195 }
1196 
1197 void cvdescriptorset::BufferDescriptor::BindCommandBuffer(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
1198     auto buffer_node = GetBufferState(dev_data, buffer_);
1199     if (buffer_node) core_validation::AddCommandBufferBindingBuffer(dev_data, cb_node, buffer_node);
1200 }
1201 
1202 cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : buffer_view_(VK_NULL_HANDLE), storage_(false) {
1203     updated = false;
1204     descriptor_class = TexelBuffer;
1205     if (VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER == type) storage_ = true;
1206 }
1207 
1208 void cvdescriptorset::TexelDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
1209     updated = true;
1210     buffer_view_ = update->pTexelBufferView[index];
1211 }
1212 
1213 void cvdescriptorset::TexelDescriptor::CopyUpdate(const Descriptor *src) {
1214     updated = true;
1215     buffer_view_ = static_cast<const TexelDescriptor *>(src)->buffer_view_;
1216 }
1217 
1218 void cvdescriptorset::TexelDescriptor::BindCommandBuffer(const layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
1219     auto bv_state = GetBufferViewState(dev_data, buffer_view_);
1220     if (bv_state) {
1221         core_validation::AddCommandBufferBindingBufferView(dev_data, cb_node, bv_state);
1222     }
1223 }
1224 
1225 // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated
1226 //  sets, and then calls their respective Validate[Write|Copy]Update functions.
1227 // If the update hits an issue for which the callback returns "true", meaning that the call down the chain should
1228 //  be skipped, then true is returned.
1229 // If there is no issue with the update, then false is returned.
1230 bool cvdescriptorset::ValidateUpdateDescriptorSets(const debug_report_data *report_data, const layer_data *dev_data,
1231                                                    uint32_t write_count, const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
1232                                                    const VkCopyDescriptorSet *p_cds) {
1233     bool skip = false;
1234     // Validate Write updates
1235     for (uint32_t i = 0; i < write_count; i++) {
1236         auto dest_set = p_wds[i].dstSet;
1237         auto set_node = core_validation::GetSetNode(dev_data, dest_set);
1238         if (!set_node) {
1239             skip |=
1240                 log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1241                         HandleToUint64(dest_set), __LINE__, DRAWSTATE_INVALID_DESCRIPTOR_SET, "DS",
1242                         "Cannot call vkUpdateDescriptorSets() on descriptor set 0x%" PRIxLEAST64 " that has not been allocated.",
1243                         HandleToUint64(dest_set));
1244         } else {
1245             UNIQUE_VALIDATION_ERROR_CODE error_code;
1246             std::string error_str;
1247             if (!set_node->ValidateWriteUpdate(report_data, &p_wds[i], &error_code, &error_str)) {
1248                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1249                                 HandleToUint64(dest_set), __LINE__, error_code, "DS",
1250                                 "vkUpdateDescriptorSets() failed write update validation for Descriptor Set 0x%" PRIx64
1251                                 " with error: %s. %s",
1252                                 HandleToUint64(dest_set), error_str.c_str(), validation_error_map[error_code]);
1253             }
1254         }
1255     }
1256     // Now validate copy updates
1257     for (uint32_t i = 0; i < copy_count; ++i) {
1258         auto dst_set = p_cds[i].dstSet;
1259         auto src_set = p_cds[i].srcSet;
1260         auto src_node = core_validation::GetSetNode(dev_data, src_set);
1261         auto dst_node = core_validation::GetSetNode(dev_data, dst_set);
1262         // Object_tracker verifies that the src & dst descriptor sets are valid
1263         assert(src_node);
1264         assert(dst_node);
1265         UNIQUE_VALIDATION_ERROR_CODE error_code;
1266         std::string error_str;
1267         if (!dst_node->ValidateCopyUpdate(report_data, &p_cds[i], src_node, &error_code, &error_str)) {
1268             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
1269                             HandleToUint64(dst_set), __LINE__, error_code, "DS",
1270                             "vkUpdateDescriptorSets() failed copy update from Descriptor Set 0x%" PRIx64
1271                             " to Descriptor Set 0x%" PRIx64 " with error: %s. %s",
1272                             HandleToUint64(src_set), HandleToUint64(dst_set), error_str.c_str(), validation_error_map[error_code]);
1273         }
1274     }
1275     return skip;
1276 }
1277 // This is a helper function that iterates over a set of Write and Copy updates, pulls the DescriptorSet* for updated
1278 //  sets, and then calls their respective Perform[Write|Copy]Update functions.
1279 // Prerequisite : ValidateUpdateDescriptorSets() should be called and return "false" prior to calling PerformUpdateDescriptorSets()
1280 //  with the same set of updates.
1281 // This is split from the validate code to allow validation prior to calling down the chain, and then update after
1282 //  calling down the chain.
1283 void cvdescriptorset::PerformUpdateDescriptorSets(const layer_data *dev_data, uint32_t write_count,
1284                                                   const VkWriteDescriptorSet *p_wds, uint32_t copy_count,
1285                                                   const VkCopyDescriptorSet *p_cds) {
1286     // Write updates first
1287     uint32_t i = 0;
1288     for (i = 0; i < write_count; ++i) {
1289         auto dest_set = p_wds[i].dstSet;
1290         auto set_node = core_validation::GetSetNode(dev_data, dest_set);
1291         if (set_node) {
1292             set_node->PerformWriteUpdate(&p_wds[i]);
1293         }
1294     }
1295     // Now copy updates
1296     for (i = 0; i < copy_count; ++i) {
1297         auto dst_set = p_cds[i].dstSet;
1298         auto src_set = p_cds[i].srcSet;
1299         auto src_node = core_validation::GetSetNode(dev_data, src_set);
1300         auto dst_node = core_validation::GetSetNode(dev_data, dst_set);
1301         if (src_node && dst_node) {
1302             dst_node->PerformCopyUpdate(&p_cds[i], src_node);
1303         }
1304     }
1305 }
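// Illustrative sketch (hypothetical call site) of the validate-then-perform split described above for
// vkUpdateDescriptorSets(): validate first, only call down the chain if validation passes, then record the
// resulting state:
//
//     bool skip = cvdescriptorset::ValidateUpdateDescriptorSets(report_data, dev_data, write_count, p_wds, copy_count, p_cds);
//     if (!skip) {
//         // call down the dispatch chain to the driver (exact dispatch mechanism omitted here)
//         cvdescriptorset::PerformUpdateDescriptorSets(dev_data, write_count, p_wds, copy_count, p_cds);
//     }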
1306 // This helper function carries out the state updates for descriptor updates performed via update templates. It basically
1307 // collects the data into write structs and leverages the PerformUpdateDescriptorSets() helper to do this.
1308 void cvdescriptorset::PerformUpdateDescriptorSetsWithTemplateKHR(layer_data *device_data, VkDescriptorSet descriptorSet,
1309                                                                  std::unique_ptr<TEMPLATE_STATE> const &template_state,
1310                                                                  const void *pData) {
1311     auto const &create_info = template_state->create_info;
1312 
1313     // Create a vector of write structs
1314     std::vector<VkWriteDescriptorSet> desc_writes;
1315     auto layout_obj = GetDescriptorSetLayout(device_data, create_info.descriptorSetLayout);
1316 
1317     // Create a WriteDescriptorSet struct for each template update entry
1318     for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) {
1319         auto binding_count = layout_obj->GetDescriptorCountFromBinding(create_info.pDescriptorUpdateEntries[i].dstBinding);
1320         auto binding_being_updated = create_info.pDescriptorUpdateEntries[i].dstBinding;
1321         auto dst_array_element = create_info.pDescriptorUpdateEntries[i].dstArrayElement;
1322 
1323         desc_writes.reserve(desc_writes.size() + create_info.pDescriptorUpdateEntries[i].descriptorCount);
1324         for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) {
1325             desc_writes.emplace_back();
1326             auto &write_entry = desc_writes.back();
1327 
1328             size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride;
1329             char *update_entry = (char *)(pData) + offset;
1330 
1331             if (dst_array_element >= binding_count) {
1332                 dst_array_element = 0;
1333                 binding_being_updated = layout_obj->GetNextValidBinding(binding_being_updated);
1334             }
1335 
1336             write_entry.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1337             write_entry.pNext = NULL;
1338             write_entry.dstSet = descriptorSet;
1339             write_entry.dstBinding = binding_being_updated;
1340             write_entry.dstArrayElement = dst_array_element;
1341             write_entry.descriptorCount = 1;
1342             write_entry.descriptorType = create_info.pDescriptorUpdateEntries[i].descriptorType;
1343 
1344             switch (create_info.pDescriptorUpdateEntries[i].descriptorType) {
1345                 case VK_DESCRIPTOR_TYPE_SAMPLER:
1346                 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1347                 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1348                 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1349                 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1350                     write_entry.pImageInfo = reinterpret_cast<VkDescriptorImageInfo *>(update_entry);
1351                     break;
1352 
1353                 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1354                 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1355                 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1356                 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1357                     write_entry.pBufferInfo = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry);
1358                     break;
1359 
1360                 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1361                 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1362                     write_entry.pTexelBufferView = reinterpret_cast<VkBufferView *>(update_entry);
1363                     break;
1364                 default:
1365                     assert(0);
1366                     break;
1367             }
1368             dst_array_element++;
1369         }
1370     }
1371     PerformUpdateDescriptorSets(device_data, static_cast<uint32_t>(desc_writes.size()), desc_writes.data(), 0, NULL);
1372 }
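// Illustrative sketch (hypothetical values): for template entry i, descriptor j above reads its info struct at
//     pData + pDescriptorUpdateEntries[i].offset + j * pDescriptorUpdateEntries[i].stride
// so an application packing VkDescriptorBufferInfo structs contiguously would describe them as:
//
//     VkDescriptorUpdateTemplateEntryKHR entry = {};
//     entry.dstBinding = 0;
//     entry.dstArrayElement = 0;
//     entry.descriptorCount = 3;
//     entry.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     entry.offset = 0;
//     entry.stride = sizeof(VkDescriptorBufferInfo);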
1373 // Validate the state for a given write update but don't actually perform the update
1374 //  If an error would occur for this update, return false and fill in details in error_msg string
1375 bool cvdescriptorset::DescriptorSet::ValidateWriteUpdate(const debug_report_data *report_data, const VkWriteDescriptorSet *update,
1376                                                          UNIQUE_VALIDATION_ERROR_CODE *error_code, std::string *error_msg) {
1377     // Verify dst layout still valid
1378     if (p_layout_->IsDestroyed()) {
1379         *error_code = VALIDATION_ERROR_15c00280;
1380         string_sprintf(error_msg,
1381                        "Cannot call vkUpdateDescriptorSets() to perform write update on descriptor set 0x%" PRIxLEAST64
1382                        " created with destroyed VkDescriptorSetLayout 0x%" PRIxLEAST64,
1383                        HandleToUint64(set_), HandleToUint64(p_layout_->GetDescriptorSetLayout()));
1384         return false;
1385     }
1386     // Verify idle ds
1387     if (in_use.load()) {
1388         // TODO : Re-using Free Idle error code, need write update idle error code
1389         *error_code = VALIDATION_ERROR_2860026a;
1390         std::stringstream error_str;
1391         error_str << "Cannot call vkUpdateDescriptorSets() to perform write update on descriptor set " << set_
1392                   << " that is in use by a command buffer";
1393         *error_msg = error_str.str();
1394         return false;
1395     }
1396     // Verify dst binding exists
1397     if (!p_layout_->HasBinding(update->dstBinding)) {
1398         *error_code = VALIDATION_ERROR_15c00276;
1399         std::stringstream error_str;
1400         error_str << "DescriptorSet " << set_ << " does not have binding " << update->dstBinding;
1401         *error_msg = error_str.str();
1402         return false;
1403     } else {
1404         // Make sure binding isn't empty
1405         if (0 == p_layout_->GetDescriptorCountFromBinding(update->dstBinding)) {
1406             *error_code = VALIDATION_ERROR_15c00278;
1407             std::stringstream error_str;
1408             error_str << "DescriptorSet " << set_ << " cannot update binding " << update->dstBinding << " that has 0 descriptors";
1409             *error_msg = error_str.str();
1410             return false;
1411         }
1412     }
1413     // We know that binding is valid, verify update and do update on each descriptor
1414     auto start_idx = p_layout_->GetGlobalIndexRangeFromBinding(update->dstBinding).start + update->dstArrayElement;
1415     auto type = p_layout_->GetTypeFromBinding(update->dstBinding);
1416     if (type != update->descriptorType) {
1417         *error_code = VALIDATION_ERROR_15c0027e;
1418         std::stringstream error_str;
1419         error_str << "Attempting write update to descriptor set " << set_ << " binding #" << update->dstBinding << " with type "
1420                   << string_VkDescriptorType(type) << " but update type is " << string_VkDescriptorType(update->descriptorType);
1421         *error_msg = error_str.str();
1422         return false;
1423     }
1424     if (update->descriptorCount > (descriptors_.size() - start_idx)) {
1425         *error_code = VALIDATION_ERROR_15c00282;
1426         std::stringstream error_str;
1427         error_str << "Attempting write update to descriptor set " << set_ << " binding #" << update->dstBinding << " with "
1428                   << descriptors_.size() - start_idx
1429                   << " descriptors in that binding and all successive bindings of the set, but update of "
1430                   << update->descriptorCount << " descriptors combined with update array element offset of "
1431                   << update->dstArrayElement << " oversteps the available number of consecutive descriptors";
1432         *error_msg = error_str.str();
1433         return false;
1434     }
1435     // Verify consecutive bindings match (if needed)
1436     if (!p_layout_->VerifyUpdateConsistency(update->dstBinding, update->dstArrayElement, update->descriptorCount, "write update to",
1437                                             set_, error_msg)) {
1438         // TODO : Should break out "consecutive binding updates" language into valid usage statements
1439         *error_code = VALIDATION_ERROR_15c00282;
1440         return false;
1441     }
1442     // Update is within bounds and consistent so last step is to validate update contents
1443     if (!VerifyWriteUpdateContents(update, start_idx, error_code, error_msg)) {
1444         std::stringstream error_str;
1445         error_str << "Write update to descriptor in set " << set_ << " binding #" << update->dstBinding
1446                   << " failed with error message: " << error_msg->c_str();
1447         *error_msg = error_str.str();
1448         return false;
1449     }
1450     // All checks passed, update is clean
1451     return true;
1452 }
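// Illustrative numeric sketch (hypothetical values) of the bounds check above: for a set containing 10
// descriptors total where the dstBinding's global index range starts at 4, a write with dstArrayElement = 2
// gives start_idx = 6, leaving 10 - 6 = 4 consecutive descriptors; a descriptorCount of 3 is accepted, while
// a descriptorCount of 5 triggers VALIDATION_ERROR_15c00282.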
1453 // For the given buffer, verify that its creation parameters are appropriate for the given type
1454 //  If there's an error, update the error_msg string with details and return false, else return true
1455 bool cvdescriptorset::DescriptorSet::ValidateBufferUsage(BUFFER_STATE const *buffer_node, VkDescriptorType type,
1456                                                          UNIQUE_VALIDATION_ERROR_CODE *error_code, std::string *error_msg) const {
1457     // Verify that usage bits set correctly for given type
1458     auto usage = buffer_node->createInfo.usage;
1459     std::string error_usage_bit;
1460     switch (type) {
1461         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1462             if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) {
1463                 *error_code = VALIDATION_ERROR_15c0029c;
1464                 error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
1465             }
1466             break;
1467         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1468             if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) {
1469                 *error_code = VALIDATION_ERROR_15c0029e;
1470                 error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
1471             }
1472             break;
1473         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1474         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1475             if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) {
1476                 *error_code = VALIDATION_ERROR_15c00292;
1477                 error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
1478             }
1479             break;
1480         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1481         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1482             if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) {
1483                 *error_code = VALIDATION_ERROR_15c00296;
1484                 error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
1485             }
1486             break;
1487         default:
1488             break;
1489     }
1490     if (!error_usage_bit.empty()) {
1491         std::stringstream error_str;
1492         error_str << "Buffer (" << buffer_node->buffer << ") with usage mask 0x" << std::hex << usage << std::dec
1493                   << " being used for a descriptor update of type " << string_VkDescriptorType(type) << " does not have "
1494                   << error_usage_bit << " set.";
1495         *error_msg = error_str.str();
1496         return false;
1497     }
1498     return true;
1499 }
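// Illustrative sketch (hypothetical values): a buffer intended for a STORAGE_BUFFER or STORAGE_BUFFER_DYNAMIC
// descriptor must have been created with the matching usage bit checked above:
//
//     VkBufferCreateInfo buffer_ci = {};
//     buffer_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
//     buffer_ci.size = 4096;
//     buffer_ci.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
//     buffer_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;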
1500 // For buffer descriptor updates, verify the buffer usage and VkDescriptorBufferInfo struct which includes:
1501 //  1. buffer is valid
1502 //  2. buffer was created with correct usage flags
1503 //  3. offset is less than buffer size
1504 //  4. range is either VK_WHOLE_SIZE or falls in (0, (buffer size - offset)]
1505 //  5. range and offset are within the device's limits
1506 // If there's an error, update the error_msg string with details and return false, else return true
1507 bool cvdescriptorset::DescriptorSet::ValidateBufferUpdate(VkDescriptorBufferInfo const *buffer_info, VkDescriptorType type,
1508                                                           UNIQUE_VALIDATION_ERROR_CODE *error_code, std::string *error_msg) const {
1509     // First make sure that buffer is valid
1510     auto buffer_node = GetBufferState(device_data_, buffer_info->buffer);
1511     // Any invalid buffer should already be caught by object_tracker
1512     assert(buffer_node);
1513     if (ValidateMemoryIsBoundToBuffer(device_data_, buffer_node, "vkUpdateDescriptorSets()", VALIDATION_ERROR_15c00294)) {
1514         *error_code = VALIDATION_ERROR_15c00294;
1515         *error_msg = "No memory bound to buffer.";
1516         return false;
1517     }
1518     // Verify usage bits
1519     if (!ValidateBufferUsage(buffer_node, type, error_code, error_msg)) {
1520         // error_msg will have been updated by ValidateBufferUsage()
1521         return false;
1522     }
1523     // offset must be less than buffer size
1524     if (buffer_info->offset >= buffer_node->createInfo.size) {
1525         *error_code = VALIDATION_ERROR_044002a8;
1526         std::stringstream error_str;
1527         error_str << "VkDescriptorBufferInfo offset of " << buffer_info->offset << " is greater than or equal to buffer "
1528                   << buffer_node->buffer << " size of " << buffer_node->createInfo.size;
1529         *error_msg = error_str.str();
1530         return false;
1531     }
1532     if (buffer_info->range != VK_WHOLE_SIZE) {
1533         // Range must be VK_WHOLE_SIZE or > 0
1534         if (!buffer_info->range) {
1535             *error_code = VALIDATION_ERROR_044002aa;
1536             std::stringstream error_str;
1537             error_str << "VkDescriptorBufferInfo range is not VK_WHOLE_SIZE and is zero, which is not allowed.";
1538             *error_msg = error_str.str();
1539             return false;
1540         }
1541         // Range must be VK_WHOLE_SIZE or <= (buffer size - offset)
1542         if (buffer_info->range > (buffer_node->createInfo.size - buffer_info->offset)) {
1543             *error_code = VALIDATION_ERROR_044002ac;
1544             std::stringstream error_str;
1545             error_str << "VkDescriptorBufferInfo range is " << buffer_info->range << " which is greater than buffer size ("
1546                       << buffer_node->createInfo.size << ") minus requested offset of " << buffer_info->offset;
1547             *error_msg = error_str.str();
1548             return false;
1549         }
1550     }
1551     // Check buffer update sizes against device limits
1552     if (VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type || VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC == type) {
1553         auto max_ub_range = limits_.maxUniformBufferRange;
1554         // TODO : If range is WHOLE_SIZE, need to make sure underlying buffer size doesn't exceed device max
1555         if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_ub_range) {
1556             *error_code = VALIDATION_ERROR_15c00298;
1557             std::stringstream error_str;
1558             error_str << "VkDescriptorBufferInfo range is " << buffer_info->range
1559                       << " which is greater than this device's maxUniformBufferRange (" << max_ub_range << ")";
1560             *error_msg = error_str.str();
1561             return false;
1562         }
1563     } else if (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER == type || VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC == type) {
1564         auto max_sb_range = limits_.maxStorageBufferRange;
1565         // TODO : If range is WHOLE_SIZE, need to make sure underlying buffer size doesn't exceed device max
1566         if (buffer_info->range != VK_WHOLE_SIZE && buffer_info->range > max_sb_range) {
1567             *error_code = VALIDATION_ERROR_15c0029a;
1568             std::stringstream error_str;
1569             error_str << "VkDescriptorBufferInfo range is " << buffer_info->range
1570                       << " which is greater than this device's maxStorageBufferRange (" << max_sb_range << ")";
1571             *error_msg = error_str.str();
1572             return false;
1573         }
1574     }
1575     return true;
1576 }
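// Illustrative numeric sketch (hypothetical values) of the offset/range rules above: for a 1024-byte buffer,
// a VkDescriptorBufferInfo with offset = 256 and range = 512 (or VK_WHOLE_SIZE) passes, while offset = 1024
// (not less than the buffer size), range = 0, or range = 1024 (greater than size - offset) is rejected, as is
// any range exceeding maxUniformBufferRange / maxStorageBufferRange for the corresponding descriptor types.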
1577 
1578 // Verify that the contents of the update are ok, but don't perform actual update
1579 bool cvdescriptorset::DescriptorSet::VerifyWriteUpdateContents(const VkWriteDescriptorSet *update, const uint32_t index,
1580                                                                UNIQUE_VALIDATION_ERROR_CODE *error_code,
1581                                                                std::string *error_msg) const {
1582     switch (update->descriptorType) {
1583         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
1584             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1585                 // Validate image
1586                 auto image_view = update->pImageInfo[di].imageView;
1587                 auto image_layout = update->pImageInfo[di].imageLayout;
1588                 if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
1589                     std::stringstream error_str;
1590                     error_str << "Attempted write update to combined image sampler descriptor failed due to: "
1591                               << error_msg->c_str();
1592                     *error_msg = error_str.str();
1593                     return false;
1594                 }
1595             }
1596             // Intentional fall-through to validate sampler
1597         }
1598         case VK_DESCRIPTOR_TYPE_SAMPLER: {
1599             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1600                 if (!descriptors_[index + di].get()->IsImmutableSampler()) {
1601                     if (!ValidateSampler(update->pImageInfo[di].sampler, device_data_)) {
1602                         *error_code = VALIDATION_ERROR_15c0028a;
1603                         std::stringstream error_str;
1604                         error_str << "Attempted write update to sampler descriptor with invalid sampler: "
1605                                   << update->pImageInfo[di].sampler << ".";
1606                         *error_msg = error_str.str();
1607                         return false;
1608                     }
1609                 } else {
1610                     // TODO : Warn here
1611                 }
1612             }
1613             break;
1614         }
1615         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1616         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1617         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
1618             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1619                 auto image_view = update->pImageInfo[di].imageView;
1620                 auto image_layout = update->pImageInfo[di].imageLayout;
1621                 if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
1622                     std::stringstream error_str;
1623                     error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str();
1624                     *error_msg = error_str.str();
1625                     return false;
1626                 }
1627             }
1628             break;
1629         }
1630         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1631         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
1632             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1633                 auto buffer_view = update->pTexelBufferView[di];
1634                 auto bv_state = GetBufferViewState(device_data_, buffer_view);
1635                 if (!bv_state) {
1636                     *error_code = VALIDATION_ERROR_15c00286;
1637                     std::stringstream error_str;
1638                     error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view;
1639                     *error_msg = error_str.str();
1640                     return false;
1641                 }
1642                 auto buffer = bv_state->create_info.buffer;
1643                 auto buffer_state = GetBufferState(device_data_, buffer);
1644                 // Verify that buffer underlying the view hasn't been destroyed prematurely
1645                 if (!buffer_state) {
1646                     *error_code = VALIDATION_ERROR_15c00286;
1647                     std::stringstream error_str;
1648                     error_str << "Attempted write update to texel buffer descriptor failed because underlying buffer (" << buffer
1649                               << ") has been destroyed: " << error_msg->c_str();
1650                     *error_msg = error_str.str();
1651                     return false;
1652                 } else if (!ValidateBufferUsage(buffer_state, update->descriptorType, error_code, error_msg)) {
1653                     std::stringstream error_str;
1654                     error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str();
1655                     *error_msg = error_str.str();
1656                     return false;
1657                 }
1658             }
1659             break;
1660         }
1661         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1662         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1663         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1664         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
1665             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1666                 if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, error_code, error_msg)) {
1667                     std::stringstream error_str;
1668                     error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str();
1669                     *error_msg = error_str.str();
1670                     return false;
1671                 }
1672             }
1673             break;
1674         }
1675         default:
1676             assert(0);  // We've already verified update type so should never get here
1677             break;
1678     }
1679     // All checks passed so update contents are good
1680     return true;
1681 }
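// Illustrative sketch (hypothetical handles): a write update that exercises the buffer path above:
//
//     VkDescriptorBufferInfo buffer_info = {my_buffer, 0, VK_WHOLE_SIZE};         // hypothetical buffer
//     VkWriteDescriptorSet write = {};
//     write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
//     write.dstSet = my_set;                                                      // hypothetical set
//     write.dstBinding = 0;
//     write.dstArrayElement = 0;
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     write.pBufferInfo = &buffer_info;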
1682 // Verify that the contents of the update are ok, but don't perform actual update
1683 bool cvdescriptorset::DescriptorSet::VerifyCopyUpdateContents(const VkCopyDescriptorSet *update, const DescriptorSet *src_set,
1684                                                               VkDescriptorType type, uint32_t index,
1685                                                               UNIQUE_VALIDATION_ERROR_CODE *error_code,
1686                                                               std::string *error_msg) const {
1687     // Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are
1688     // for write updates
1689     switch (src_set->descriptors_[index]->descriptor_class) {
1690         case PlainSampler: {
1691             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1692                 const auto src_desc = src_set->descriptors_[index + di].get();
1693                 if (!src_desc->updated) continue;
1694                 if (!src_desc->IsImmutableSampler()) {
1695                     auto update_sampler = static_cast<SamplerDescriptor *>(src_desc)->GetSampler();
1696                     if (!ValidateSampler(update_sampler, device_data_)) {
1697                         *error_code = VALIDATION_ERROR_15c0028a;
1698                         std::stringstream error_str;
1699                         error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
1700                         *error_msg = error_str.str();
1701                         return false;
1702                     }
1703                 } else {
1704                     // TODO : Warn here
1705                 }
1706             }
1707             break;
1708         }
1709         case ImageSampler: {
1710             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1711                 const auto src_desc = src_set->descriptors_[index + di].get();
1712                 if (!src_desc->updated) continue;
1713                 auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_desc);
1714                 // First validate sampler
1715                 if (!img_samp_desc->IsImmutableSampler()) {
1716                     auto update_sampler = img_samp_desc->GetSampler();
1717                     if (!ValidateSampler(update_sampler, device_data_)) {
1718                         *error_code = VALIDATION_ERROR_15c0028a;
1719                         std::stringstream error_str;
1720                         error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
1721                         *error_msg = error_str.str();
1722                         return false;
1723                     }
1724                 } else {
1725                     // TODO : Warn here
1726                 }
1727                 // Validate image
1728                 auto image_view = img_samp_desc->GetImageView();
1729                 auto image_layout = img_samp_desc->GetImageLayout();
1730                 if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
1731                     std::stringstream error_str;
1732                     error_str << "Attempted copy update to combined image sampler descriptor failed due to: " << error_msg->c_str();
1733                     *error_msg = error_str.str();
1734                     return false;
1735                 }
1736             }
1737             break;
1738         }
1739         case Image: {
1740             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1741                 const auto src_desc = src_set->descriptors_[index + di].get();
1742                 if (!src_desc->updated) continue;
1743                 auto img_desc = static_cast<const ImageDescriptor *>(src_desc);
1744                 auto image_view = img_desc->GetImageView();
1745                 auto image_layout = img_desc->GetImageLayout();
1746                 if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
1747                     std::stringstream error_str;
1748                     error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str();
1749                     *error_msg = error_str.str();
1750                     return false;
1751                 }
1752             }
1753             break;
1754         }
1755         case TexelBuffer: {
1756             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1757                 const auto src_desc = src_set->descriptors_[index + di].get();
1758                 if (!src_desc->updated) continue;
1759                 auto buffer_view = static_cast<TexelDescriptor *>(src_desc)->GetBufferView();
1760                 auto bv_state = GetBufferViewState(device_data_, buffer_view);
1761                 if (!bv_state) {
1762                     *error_code = VALIDATION_ERROR_15c00286;
1763                     std::stringstream error_str;
1764                     error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " << buffer_view;
1765                     *error_msg = error_str.str();
1766                     return false;
1767                 }
1768                 auto buffer = bv_state->create_info.buffer;
1769                 if (!ValidateBufferUsage(GetBufferState(device_data_, buffer), type, error_code, error_msg)) {
1770                     std::stringstream error_str;
1771                     error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str();
1772                     *error_msg = error_str.str();
1773                     return false;
1774                 }
1775             }
1776             break;
1777         }
1778         case GeneralBuffer: {
1779             for (uint32_t di = 0; di < update->descriptorCount; ++di) {
1780                 const auto src_desc = src_set->descriptors_[index + di].get();
1781                 if (!src_desc->updated) continue;
1782                 auto buffer = static_cast<BufferDescriptor *>(src_desc)->GetBuffer();
1783                 if (!ValidateBufferUsage(GetBufferState(device_data_, buffer), type, error_code, error_msg)) {
1784                     std::stringstream error_str;
1785                     error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str();
1786                     *error_msg = error_str.str();
1787                     return false;
1788                 }
1789             }
1790             break;
1791         }
1792         default:
1793             assert(0);  // We've already verified update type so should never get here
1794             break;
1795     }
1796     // All checks passed so update contents are good
1797     return true;
1798 }
1799 // Update the common AllocateDescriptorSetsData
1800 void cvdescriptorset::UpdateAllocateDescriptorSetsData(const layer_data *dev_data, const VkDescriptorSetAllocateInfo *p_alloc_info,
1801                                                        AllocateDescriptorSetsData *ds_data) {
1802     for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
1803         auto layout = GetDescriptorSetLayout(dev_data, p_alloc_info->pSetLayouts[i]);
1804         if (layout) {
1805             ds_data->layout_nodes[i] = layout;
1806             // Count total descriptors required per type
1807             for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
1808                 const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
1809                 uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
1810                 ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
1811             }
1812         }
1813         // Any unknown layouts will be flagged as errors during ValidateAllocateDescriptorSets() call
1814     }
1815 }
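// Illustrative sketch (hypothetical layout): a layout with binding 0 = 2 UNIFORM_BUFFER descriptors and
// binding 1 = 4 COMBINED_IMAGE_SAMPLER descriptors contributes
//     required_descriptors_by_type[VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]         += 2;
//     required_descriptors_by_type[VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] += 4;
// which ValidateAllocateDescriptorSets() below compares against the pool's remaining per-type counts.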
1816 // Verify that the state at allocate time is correct, but don't actually allocate the sets yet
1817 bool cvdescriptorset::ValidateAllocateDescriptorSets(const core_validation::layer_data *dev_data,
1818                                                      const VkDescriptorSetAllocateInfo *p_alloc_info,
1819                                                      const AllocateDescriptorSetsData *ds_data) {
1820     bool skip = false;
1821     auto report_data = core_validation::GetReportData(dev_data);
1822 
1823     for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
1824         auto layout = GetDescriptorSetLayout(dev_data, p_alloc_info->pSetLayouts[i]);
1825         if (layout) {  // nullptr layout indicates no valid layout handle for this device, validated/logged in object_tracker
1826             if (layout->GetCreateFlags() & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
1827                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
1828                                 HandleToUint64(p_alloc_info->pSetLayouts[i]), __LINE__, VALIDATION_ERROR_04c00268, "DS",
1829                                 "Layout 0x%" PRIxLEAST64 " specified at pSetLayouts[%" PRIu32
1830                                 "] in vkAllocateDescriptorSets() was created with invalid flag %s set. %s",
1831                                 HandleToUint64(p_alloc_info->pSetLayouts[i]), i,
1832                                 "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR",
1833                                 validation_error_map[VALIDATION_ERROR_04c00268]);
1834             }
1835         }
1836     }
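     // The pool-capacity checks below apply only when VK_KHR_maintenance1 is not enabled; with that extension an exhausted
     // pool is reported by the driver as VK_ERROR_OUT_OF_POOL_MEMORY_KHR rather than being invalid API usage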
1837     if (!GetDeviceExtensions(dev_data)->vk_khr_maintenance1) {
1838         auto pool_state = GetDescriptorPoolState(dev_data, p_alloc_info->descriptorPool);
1839         // Check that the pool has enough descriptorSets remaining to satisfy this allocation
1840         if (pool_state->availableSets < p_alloc_info->descriptorSetCount) {
1841             skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
1842                             HandleToUint64(pool_state->pool), __LINE__, VALIDATION_ERROR_04c00264, "DS",
1843                             "Unable to allocate %u descriptorSets from pool 0x%" PRIxLEAST64
1844                             ". This pool only has %u descriptorSets remaining. %s",
1845                             p_alloc_info->descriptorSetCount, HandleToUint64(pool_state->pool), pool_state->availableSets,
1846                             validation_error_map[VALIDATION_ERROR_04c00264]);
1847         }
1848         // Determine whether descriptor counts are satisfiable
1849         for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; i++) {
1850             if (ds_data->required_descriptors_by_type[i] > pool_state->availableDescriptorTypeCount[i]) {
1851                 skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
1852                                 HandleToUint64(pool_state->pool), __LINE__, VALIDATION_ERROR_04c00266, "DS",
1853                                 "Unable to allocate %u descriptors of type %s from pool 0x%" PRIxLEAST64
1854                                 ". This pool only has %u descriptors of this type remaining. %s",
1855                                 ds_data->required_descriptors_by_type[i], string_VkDescriptorType(VkDescriptorType(i)),
1856                                 HandleToUint64(pool_state->pool), pool_state->availableDescriptorTypeCount[i],
1857                                 validation_error_map[VALIDATION_ERROR_04c00266]);
1858             }
1859         }
1860     }
1861 
1862     return skip;
1863 }
1864 // Decrement the pool's available set and descriptor counts, and insert state objects for the new sets into set_map
1865 void cvdescriptorset::PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *p_alloc_info,
1866                                                     const VkDescriptorSet *descriptor_sets,
1867                                                     const AllocateDescriptorSetsData *ds_data,
1868                                                     std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_STATE *> *pool_map,
1869                                                     std::unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet *> *set_map,
1870                                                     layer_data *dev_data) {
1871     auto pool_state = (*pool_map)[p_alloc_info->descriptorPool];
1872     // Account for sets and individual descriptors allocated from pool
1873     pool_state->availableSets -= p_alloc_info->descriptorSetCount;
1874     for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; i++) {
1875         pool_state->availableDescriptorTypeCount[i] -= ds_data->required_descriptors_by_type[i];
1876     }
1877     // Create tracking object for each descriptor set; insert into global map and the pool's set.
1878     for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
1879         auto new_ds = new cvdescriptorset::DescriptorSet(descriptor_sets[i], p_alloc_info->descriptorPool, ds_data->layout_nodes[i],
1880                                                          dev_data);
1881 
1882         pool_state->sets.insert(new_ds);
1883         new_ds->in_use.store(0);
1884         (*set_map)[descriptor_sets[i]] = new_ds;
1885     }
1886 }
1887 
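     // PrefilterBindRequestMap: for descriptor sets with a very large descriptor count, pre-filter the binding requirement map
     // down to the bindings that still need validation for this command buffer (and pipeline), so validation need not walk every binding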
1888 cvdescriptorset::PrefilterBindRequestMap::PrefilterBindRequestMap(cvdescriptorset::DescriptorSet &ds, const BindingReqMap &in_map,
1889                                                                   GLOBAL_CB_NODE *cb_state)
1890     : filtered_map_(), orig_map_(in_map) {
1891     if (ds.GetTotalDescriptorCount() > kManyDescriptors_) {
1892         filtered_map_.reset(new std::map<uint32_t, descriptor_req>());
1893         ds.FilterAndTrackBindingReqs(cb_state, orig_map_, filtered_map_.get());
1894     }
1895 }
1896 cvdescriptorset::PrefilterBindRequestMap::PrefilterBindRequestMap(cvdescriptorset::DescriptorSet &ds, const BindingReqMap &in_map,
1897                                                                   GLOBAL_CB_NODE *cb_state, PIPELINE_STATE *pipeline)
1898     : filtered_map_(), orig_map_(in_map) {
1899     if (ds.GetTotalDescriptorCount() > kManyDescriptors_) {
1900         filtered_map_.reset(new std::map<uint32_t, descriptor_req>());
1901         ds.FilterAndTrackBindingReqs(cb_state, pipeline, orig_map_, filtered_map_.get());
1902     }
1903 }
1904