/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkDescriptorSetManager.h"

#include "src/gpu/vk/GrVkDescriptorPool.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUniformHandler.h"

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
#include <sanitizer/lsan_interface.h>
#endif

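// The Create*Manager factories below configure a manager for each descriptor type the Vulkan
// backend uses: uniform buffers, combined image samplers (with or without bindings), and input
// attachments. Each factory records the shader-stage visibility of its bindings before
// delegating to Create().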
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
    if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
        stages |= kGeometry_GrShaderFlag;
    }
    visibilities.push_back(stages);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
        GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
    SkSTArray<4, uint32_t> visibilities;
    SkSTArray<4, const GrVkSampler*> immutableSamplers;
    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    for (int i = 0; i < uniformHandler.numSamplers(); ++i) {
        visibilities.push_back(uniformHandler.samplerVisibility(i));
        immutableSamplers.push_back(uniformHandler.immutableSampler(i));
    }
    return Create(gpu, type, visibilities, immutableSamplers);
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateZeroSamplerManager(GrVkGpu* gpu) {
    SkTArray<uint32_t> visibilities;
    SkTArray<const GrVkSampler*> immutableSamplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, visibilities, immutableSamplers);
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateInputManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    visibilities.push_back(kFragment_GrShaderFlag);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, visibilities, samplers);
}

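// Translates a GrShaderFlag visibility mask into the equivalent Vulkan shader stage flags for a
// descriptor set layout binding.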
VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
    VkShaderStageFlags flags = 0;

    if (visibility & kVertex_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (visibility & kGeometry_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    if (visibility & kFragment_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    return flags;
}

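// Creates the VkDescriptorSetLayout for the given descriptor type and binding visibilities, and
// reports how many descriptors a single set consumes. For combined image samplers, a binding
// backed by an immutable (e.g. YCbCr) sampler can count as more than one descriptor.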
static bool get_layout_and_desc_count(GrVkGpu* gpu,
                                      VkDescriptorType type,
                                      const SkTArray<uint32_t>& visibilities,
                                      const SkTArray<const GrVkSampler*>& immutableSamplers,
                                      VkDescriptorSetLayout* descSetLayout,
                                      uint32_t* descCountPerSet) {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        *descCountPerSet = 0;
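        // Build one layout binding per sampler. A binding with an immutable sampler may require
        // multiple descriptors (ycbcrCombinedImageSamplerDescriptorCount), so tally the true
        // per-set descriptor count as we go.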
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                if (immutableSamplers[i]) {
                    (*descCountPerSet) += gpu->vkCaps().ycbcrCombinedImageSamplerDescriptorCount();
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    (*descCountPerSet)++;
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting pBindings to nullptr fixes an error from the parameter-checker validation
        // layer. Even though a bindingCount of 0 is valid, the layer still tries to validate
        // pBindings unless it is null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result,
                          CreateDescriptorSetLayout(gpu->device(),
                                                    &dsSamplerLayoutCreateInfo,
                                                    nullptr,
                                                    descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }
    } else if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
        static constexpr int kUniformDescPerSet = 1;
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &uniformLayoutCreateInfo,
                                                                 nullptr,
                                                                 descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kUniformDescPerSet;
    } else {
        SkASSERT(type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
        static constexpr int kInputDescPerSet = 1;
        SkASSERT(kInputDescPerSet == visibilities.count());

        // Create Input Attachment Descriptor
        VkDescriptorSetLayoutBinding dsInputBinding;
        dsInputBinding.binding = 0;
        dsInputBinding.descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
        dsInputBinding.descriptorCount = 1;
        SkASSERT(visibilities[0] == kFragment_GrShaderFlag);
        dsInputBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsInputBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo inputLayoutCreateInfo;
        inputLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        inputLayoutCreateInfo.pNext = nullptr;
        inputLayoutCreateInfo.flags = 0;
        inputLayoutCreateInfo.bindingCount = 1;
        inputLayoutCreateInfo.pBindings = &dsInputBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &inputLayoutCreateInfo,
                                                                 nullptr, descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kInputDescPerSet;
    }
    return true;
}

GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
        GrVkGpu* gpu, VkDescriptorType type,
        const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers) {
#ifdef SK_DEBUG
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        SkASSERT(visibilities.count() == immutableSamplers.count());
    } else {
        SkASSERT(immutableSamplers.count() == 0);
    }
#endif

    VkDescriptorSetLayout descSetLayout;
    uint32_t descCountPerSet;
    if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
                                   &descCountPerSet)) {
        return nullptr;
    }
    return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
                                        immutableSamplers);
}

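// The constructor copies the per-binding visibilities and takes a ref on each immutable sampler
// it keeps; those refs are released in release().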
GrVkDescriptorSetManager::GrVkDescriptorSetManager(
        GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
        uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers)
    : fPoolManager(descSetLayout, type, descCountPerSet) {
    for (int i = 0; i < visibilities.count(); ++i) {
        fBindingVisibilities.push_back(visibilities[i]);
    }
    for (int i = 0; i < immutableSamplers.count(); ++i) {
        const GrVkSampler* sampler = immutableSamplers[i];
        if (sampler) {
            sampler->ref();
        }
        fImmutableSamplers.push_back(sampler);
    }
}

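// Hands out a descriptor set, preferring a previously recycled set from fFreeSets before
// allocating a new one through the pool manager.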
const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
                                                                    const Handle& handle) {
    const GrVkDescriptorSet* ds = nullptr;
    int count = fFreeSets.count();
    if (count > 0) {
        ds = fFreeSets[count - 1];
        fFreeSets.removeShuffle(count - 1);
    } else {
        VkDescriptorSet vkDS;
        if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
            return nullptr;
        }

        ds = new GrVkDescriptorSet(gpu, vkDS, fPoolManager.fPool, handle);
    }
    SkASSERT(ds);
    return ds;
}

void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}

void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
    fPoolManager.freeGPUResources(gpu);

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unref();
    }
    fFreeSets.reset();

    for (int i = 0; i < fImmutableSamplers.count(); ++i) {
        if (fImmutableSamplers[i]) {
            fImmutableSamplers[i]->unref();
        }
    }
    fImmutableSamplers.reset();
}

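// A manager is compatible with a uniform handler only if the descriptor type matches and every
// binding has the same visibility and immutable sampler as the handler's samplers.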
bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const GrVkUniformHandler* uniHandler) const {
    SkASSERT(uniHandler);
    if (type != fPoolManager.fDescType) {
        return false;
    }

    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
        return false;
    }
    for (int i = 0; i < uniHandler->numSamplers(); ++i) {
        if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
            uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
            return false;
        }
    }
    return true;
}

bool GrVkDescriptorSetManager::isZeroSampler() const {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER != fPoolManager.fDescType) {
        return false;
    }
    if (fBindingVisibilities.count()) {
        return false;
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////

GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
    : fDescLayout(layout)
    , fDescType(type)
    , fDescCountPerSet(descCountPerSet)
    , fMaxDescriptors(kStartNumDescriptors)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
}

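// Acquires a new descriptor pool from the resource provider. If a pool already exists it is
// unreffed first, and the descriptor budget grows by roughly 1.5x, capped at kMaxDescriptors.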
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
    if (fPool) {
        fPool->unref();
        uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
        if (newPoolSize < kMaxDescriptors) {
            fMaxDescriptors = newPoolSize;
        } else {
            fMaxDescriptors = kMaxDescriptors;
        }
    }
    fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
                                                                         fMaxDescriptors);
    return SkToBool(fPool);
}

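// Allocates a single VkDescriptorSet, rolling over to a fresh (larger) pool when the current
// pool cannot hold another fDescCountPerSet descriptors.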
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    if (!fMaxDescriptors) {
        return false;
    }
    fCurrentDescriptorCount += fDescCountPerSet;
    if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
        if (!this->getNewPool(gpu)) {
            return false;
        }
        fCurrentDescriptorCount = fDescCountPerSet;
    }

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
                                                          &dsAllocateInfo,
                                                          ds));
    return result == VK_SUCCESS;
}

void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
    if (fDescLayout) {
        GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
                                                                  nullptr));
        fDescLayout = VK_NULL_HANDLE;
    }

    if (fPool) {
        fPool->unref();
        fPool = nullptr;
    }
}