// Copyright 2018 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "dawn_native/vulkan/BindGroupLayoutVk.h"

#include "common/BitSetIterator.h"
#include "common/ityp_vector.h"
#include "dawn_native/vulkan/BindGroupVk.h"
#include "dawn_native/vulkan/DescriptorSetAllocator.h"
#include "dawn_native/vulkan/DeviceVk.h"
#include "dawn_native/vulkan/FencedDeleter.h"
#include "dawn_native/vulkan/UtilsVulkan.h"
#include "dawn_native/vulkan/VulkanError.h"

#include <map>

namespace dawn_native { namespace vulkan {

    namespace {

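        // Converts a wgpu::ShaderStage bitmask into the equivalent VkShaderStageFlags.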
        VkShaderStageFlags VulkanShaderStageFlags(wgpu::ShaderStage stages) {
            VkShaderStageFlags flags = 0;

            if (stages & wgpu::ShaderStage::Vertex) {
                flags |= VK_SHADER_STAGE_VERTEX_BIT;
            }
            if (stages & wgpu::ShaderStage::Fragment) {
                flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
            }
            if (stages & wgpu::ShaderStage::Compute) {
                flags |= VK_SHADER_STAGE_COMPUTE_BIT;
            }

            return flags;
        }

    }  // anonymous namespace

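    // Maps a binding's type to the corresponding Vulkan descriptor type, selecting the
    // *_DYNAMIC buffer variants when the binding uses dynamic offsets.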
    VkDescriptorType VulkanDescriptorType(const BindingInfo& bindingInfo) {
        switch (bindingInfo.bindingType) {
            case BindingInfoType::Buffer:
                switch (bindingInfo.buffer.type) {
                    case wgpu::BufferBindingType::Uniform:
                        if (bindingInfo.buffer.hasDynamicOffset) {
                            return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
                        }
                        return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
                    case wgpu::BufferBindingType::Storage:
                    case kInternalStorageBufferBinding:
                    case wgpu::BufferBindingType::ReadOnlyStorage:
                        if (bindingInfo.buffer.hasDynamicOffset) {
                            return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
                        }
                        return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
                    case wgpu::BufferBindingType::Undefined:
                        UNREACHABLE();
                }
            case BindingInfoType::Sampler:
                return VK_DESCRIPTOR_TYPE_SAMPLER;
            case BindingInfoType::Texture:
            case BindingInfoType::ExternalTexture:
                return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
            case BindingInfoType::StorageTexture:
                return VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        }
        UNREACHABLE();
    }

    // static
    ResultOrError<Ref<BindGroupLayout>> BindGroupLayout::Create(
        Device* device,
        const BindGroupLayoutDescriptor* descriptor,
        PipelineCompatibilityToken pipelineCompatibilityToken) {
        Ref<BindGroupLayout> bgl =
            AcquireRef(new BindGroupLayout(device, descriptor, pipelineCompatibilityToken));
        DAWN_TRY(bgl->Initialize());
        return bgl;
    }

    MaybeError BindGroupLayout::Initialize() {
        // Compute the bindings that will be chained in the DescriptorSetLayout create info. We add
        // one entry per binding. This might be optimized by computing contiguous ranges of
        // bindings of the same type.
        ityp::vector<BindingIndex, VkDescriptorSetLayoutBinding> bindings;
        bindings.reserve(GetBindingCount());

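        // GetBindingMap() maps each API-visible BindingNumber to a densely packed
        // BindingIndex; the packed index is what's used as the Vulkan binding slot below.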
        for (const auto& it : GetBindingMap()) {
            BindingIndex bindingIndex = it.second;
            const BindingInfo& bindingInfo = GetBindingInfo(bindingIndex);

            VkDescriptorSetLayoutBinding vkBinding;
            vkBinding.binding = static_cast<uint32_t>(bindingIndex);
            // TODO(dawn:728) In the future, special handling will be needed for external textures
            // here because they encompass multiple views.
            vkBinding.descriptorType = VulkanDescriptorType(bindingInfo);
            vkBinding.descriptorCount = 1;
            vkBinding.stageFlags = VulkanShaderStageFlags(bindingInfo.visibility);
            vkBinding.pImmutableSamplers = nullptr;

            bindings.emplace_back(vkBinding);
        }

        VkDescriptorSetLayoutCreateInfo createInfo;
        createInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;
        createInfo.bindingCount = static_cast<uint32_t>(bindings.size());
        createInfo.pBindings = bindings.data();

        Device* device = ToBackend(GetDevice());
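        // Note: Dawn wraps Vulkan handles in a typed wrapper, so &*mHandle dereferences the
        // wrapper to yield the address of the raw handle that vkCreateDescriptorSetLayout
        // writes into.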
        DAWN_TRY(CheckVkSuccess(device->fn.CreateDescriptorSetLayout(
                                    device->GetVkDevice(), &createInfo, nullptr, &*mHandle),
                                "CreateDescriptorSetLayout"));

        // Compute the size of descriptor pools used for this layout.
        std::map<VkDescriptorType, uint32_t> descriptorCountPerType;

        for (BindingIndex bindingIndex{0}; bindingIndex < GetBindingCount(); ++bindingIndex) {
            // TODO(dawn:728) In the future, special handling will be needed for external textures
            // here because they encompass multiple views.
            VkDescriptorType vulkanType = VulkanDescriptorType(GetBindingInfo(bindingIndex));

            // map::operator[] will return 0 if the key doesn't exist.
            descriptorCountPerType[vulkanType]++;
        }

        // TODO(enga): Consider deduping allocators for layouts with the same descriptor type
        // counts.
        mDescriptorSetAllocator =
            DescriptorSetAllocator::Create(this, std::move(descriptorCountPerType));

        SetLabelImpl();

        return {};
    }

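    // The 4096 below appears to be the slab size handed to the frontend's slab allocator for
    // BindGroup objects (an assumption based on MakeFrontendBindGroupAllocator's parameter;
    // see dawn_native/BindGroupLayout.h).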
    BindGroupLayout::BindGroupLayout(DeviceBase* device,
                                     const BindGroupLayoutDescriptor* descriptor,
                                     PipelineCompatibilityToken pipelineCompatibilityToken)
        : BindGroupLayoutBase(device, descriptor, pipelineCompatibilityToken),
          mBindGroupAllocator(MakeFrontendBindGroupAllocator<BindGroup>(4096)) {
    }

    BindGroupLayout::~BindGroupLayout() = default;

    void BindGroupLayout::DestroyImpl() {
        BindGroupLayoutBase::DestroyImpl();

        Device* device = ToBackend(GetDevice());

        // DescriptorSetLayouts aren't used by execution on the GPU and can be deleted at any
        // time, so we can destroy mHandle immediately instead of using the FencedDeleter.
        // (Swiftshader implements this wrong b/154522740).
        // In practice, the GPU is done with all descriptor sets because bind group deallocation
        // refs the bind group layout so that once the bind group is finished being used, we can
        // recycle its descriptor set.
        if (mHandle != VK_NULL_HANDLE) {
            device->fn.DestroyDescriptorSetLayout(device->GetVkDevice(), mHandle, nullptr);
            mHandle = VK_NULL_HANDLE;
        }
        mDescriptorSetAllocator = nullptr;
    }

    VkDescriptorSetLayout BindGroupLayout::GetHandle() const {
        return mHandle;
    }

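    // Allocates a descriptor set from this layout's shared allocator, then constructs the
    // frontend BindGroup object around it from the slab allocator.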
    ResultOrError<Ref<BindGroup>> BindGroupLayout::AllocateBindGroup(
        Device* device,
        const BindGroupDescriptor* descriptor) {
        DescriptorSetAllocation descriptorSetAllocation;
        DAWN_TRY_ASSIGN(descriptorSetAllocation, mDescriptorSetAllocator->Allocate());

        return AcquireRef(
            mBindGroupAllocator.Allocate(device, descriptor, descriptorSetAllocation));
    }

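    // Returns the descriptor set to the allocator for recycling, then releases the BindGroup's
    // slab-allocated memory.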
    void BindGroupLayout::DeallocateBindGroup(BindGroup* bindGroup,
                                              DescriptorSetAllocation* descriptorSetAllocation) {
        mDescriptorSetAllocator->Deallocate(descriptorSetAllocation);
        mBindGroupAllocator.Deallocate(bindGroup);
    }

    void BindGroupLayout::SetLabelImpl() {
        SetDebugName(ToBackend(GetDevice()), VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
                     reinterpret_cast<uint64_t&>(mHandle), "Dawn_BindGroupLayout", GetLabel());
    }

}}  // namespace dawn_native::vulkan