/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkDescriptorSetManager.h"

#include "src/gpu/vk/GrVkDescriptorPool.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUniformHandler.h"

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
#include <sanitizer/lsan_interface.h>
#endif

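// Creates a manager for a single uniform buffer descriptor that is visible to both the
// vertex and fragment stages.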
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
    visibilities.push_back(stages);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
}

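// Creates a manager for combined image/sampler descriptors, mirroring the per-sampler
// visibility and immutable sampler recorded by the uniform handler.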
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
        GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
    SkSTArray<4, uint32_t> visibilities;
    SkSTArray<4, const GrVkSampler*> immutableSamplers;
    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    for (int i = 0; i < uniformHandler.numSamplers(); ++i) {
        visibilities.push_back(uniformHandler.samplerVisibility(i));
        immutableSamplers.push_back(uniformHandler.immutableSampler(i));
    }
    return Create(gpu, type, visibilities, immutableSamplers);
}

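// Creates a combined image/sampler manager with no bindings at all; see isZeroSampler().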
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateZeroSamplerManager(GrVkGpu* gpu) {
    SkTArray<uint32_t> visibilities;
    SkTArray<const GrVkSampler*> immutableSamplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, visibilities, immutableSamplers);
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateInputManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    visibilities.push_back(kFragment_GrShaderFlag);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, visibilities, samplers);
}

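// Typical lifecycle of a manager built by the factories above (an illustrative sketch, not
// code from this file; in Skia the resource provider drives these managers, and the gpu and
// handle arguments are assumed to be valid):
//
//     GrVkDescriptorSetManager* mgr = GrVkDescriptorSetManager::CreateUniformManager(gpu);
//     const GrVkDescriptorSet* set = mgr->getDescriptorSet(gpu, handle);
//     // ... update and bind the set ...
//     mgr->recycleDescriptorSet(set);  // makes the set available to a later getDescriptorSet()
//     mgr->release(gpu);               // frees the layout, pool, and any cached sets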
VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
    VkShaderStageFlags flags = 0;

    if (visibility & kVertex_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (visibility & kFragment_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    return flags;
}

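// Builds a VkDescriptorSetLayout for the requested descriptor type and reports how many
// descriptors a single set consumes. Bindings that use an immutable YCbCr sampler may count
// as more than one descriptor (see ycbcrCombinedImageSamplerDescriptorCount()).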
static bool get_layout_and_desc_count(GrVkGpu* gpu,
                                      VkDescriptorType type,
                                      const SkTArray<uint32_t>& visibilities,
                                      const SkTArray<const GrVkSampler*>& immutableSamplers,
                                      VkDescriptorSetLayout* descSetLayout,
                                      uint32_t* descCountPerSet) {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        *descCountPerSet = 0;
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                if (immutableSamplers[i]) {
                    (*descCountPerSet) += gpu->vkCaps().ycbcrCombinedImageSamplerDescriptorCount();
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    (*descCountPerSet)++;
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting pBindings to nullptr fixes an error in the param checker validation layer.
        // Even when bindingCount is 0 (which is valid), the layer still tries to validate
        // pBindings unless it is null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result,
                          CreateDescriptorSetLayout(gpu->device(),
                                                    &dsSamplerLayoutCreateInfo,
                                                    nullptr,
                                                    descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }
    } else if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
        static constexpr int kUniformDescPerSet = 1;
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &uniformLayoutCreateInfo,
                                                                 nullptr,
                                                                 descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kUniformDescPerSet;
    } else {
        SkASSERT(type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
        static constexpr int kInputDescPerSet = 1;
        SkASSERT(kInputDescPerSet == visibilities.count());

        // Create Input Buffer Descriptor
        VkDescriptorSetLayoutBinding dsInpuBinding;
        dsInpuBinding.binding = 0;
        dsInpuBinding.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
        dsInpuBinding.descriptorCount = 1;
        SkASSERT(visibilities[0] == kFragment_GrShaderFlag);
        dsInpuBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsInpuBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo inputLayoutCreateInfo;
        inputLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        inputLayoutCreateInfo.pNext = nullptr;
        inputLayoutCreateInfo.flags = 0;
        inputLayoutCreateInfo.bindingCount = 1;
        inputLayoutCreateInfo.pBindings = &dsInpuBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &inputLayoutCreateInfo,
                                                                 nullptr, descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kInputDescPerSet;
    }
    return true;
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
        GrVkGpu* gpu, VkDescriptorType type,
        const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers) {
#ifdef SK_DEBUG
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        SkASSERT(visibilities.count() == immutableSamplers.count());
    } else {
        SkASSERT(immutableSamplers.count() == 0);
    }
#endif

    VkDescriptorSetLayout descSetLayout;
    uint32_t descCountPerSet;
    if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
                                   &descCountPerSet)) {
        return nullptr;
    }
    return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
                                        immutableSamplers);
}

GrVkDescriptorSetManager::GrVkDescriptorSetManager(
        GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
        uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers)
    : fPoolManager(descSetLayout, type, descCountPerSet) {
    for (int i = 0; i < visibilities.count(); ++i) {
        fBindingVisibilities.push_back(visibilities[i]);
    }
    for (int i = 0; i < immutableSamplers.count(); ++i) {
        const GrVkSampler* sampler = immutableSamplers[i];
        if (sampler) {
            sampler->ref();
        }
        fImmutableSamplers.push_back(sampler);
    }
}

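// Returns a descriptor set for the caller, reusing one from the recycled free list when
// possible and only allocating from the pool manager when the list is empty.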
const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
                                                                    const Handle& handle) {
    const GrVkDescriptorSet* ds = nullptr;
    int count = fFreeSets.count();
    if (count > 0) {
        ds = fFreeSets[count - 1];
        fFreeSets.removeShuffle(count - 1);
    } else {
        VkDescriptorSet vkDS;
        if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
            return nullptr;
        }

        ds = new GrVkDescriptorSet(gpu, vkDS, fPoolManager.fPool, handle);
    }
    SkASSERT(ds);
    return ds;
}

void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}

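// Frees the pool manager's Vulkan objects and drops this manager's references to any
// recycled descriptor sets and immutable samplers.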
void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
    fPoolManager.freeGPUResources(gpu);

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unref();
    }
    fFreeSets.reset();

    for (int i = 0; i < fImmutableSamplers.count(); ++i) {
        if (fImmutableSamplers[i]) {
            fImmutableSamplers[i]->unref();
        }
    }
    fImmutableSamplers.reset();
}

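// True if this sampler manager's binding visibilities and immutable samplers exactly match
// what the given uniform handler requires.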
bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const GrVkUniformHandler* uniHandler) const {
    SkASSERT(uniHandler);
    if (type != fPoolManager.fDescType) {
        return false;
    }

    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
        return false;
    }
    for (int i = 0; i < uniHandler->numSamplers(); ++i) {
        if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
            uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
            return false;
        }
    }
    return true;
}

bool GrVkDescriptorSetManager::isZeroSampler() const {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER != fPoolManager.fDescType) {
        return false;
    }
    if (fBindingVisibilities.count()) {
        return false;
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////

GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
    : fDescLayout(layout)
    , fDescType(type)
    , fDescCountPerSet(descCountPerSet)
    , fMaxDescriptors(kStartNumDescriptors)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
}

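// Replaces the current pool with a larger one, growing the descriptor budget by roughly 1.5x
// per pool up to kMaxDescriptors.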
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
    if (fPool) {
        fPool->unref();
        uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
        if (newPoolSize < kMaxDescriptors) {
            fMaxDescriptors = newPoolSize;
        } else {
            fMaxDescriptors = kMaxDescriptors;
        }
    }
    fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
                                                                         fMaxDescriptors);
    return SkToBool(fPool);
}

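// Allocates one descriptor set from the current pool, first switching to a new pool if this
// allocation would exceed the current pool's descriptor budget.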
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    if (!fMaxDescriptors) {
        return false;
    }
    fCurrentDescriptorCount += fDescCountPerSet;
    if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
        if (!this->getNewPool(gpu)) {
            return false;
        }
        fCurrentDescriptorCount = fDescCountPerSet;
    }

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
                                                          &dsAllocateInfo,
                                                          ds));
    return result == VK_SUCCESS;
}

void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
    if (fDescLayout) {
        GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
                                                                  nullptr));
        fDescLayout = VK_NULL_HANDLE;
    }

    if (fPool) {
        fPool->unref();
        fPool = nullptr;
    }
}