• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2018 The Amber Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 //     http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 #include "src/vulkan/pipeline.h"
16 
17 #include <algorithm>
18 #include <limits>
19 #include <utility>
20 
21 #include "src/command.h"
22 #include "src/engine.h"
23 #include "src/make_unique.h"
24 #include "src/vulkan/buffer_descriptor.h"
25 #include "src/vulkan/compute_pipeline.h"
26 #include "src/vulkan/device.h"
27 #include "src/vulkan/graphics_pipeline.h"
28 #include "src/vulkan/image_descriptor.h"
29 #include "src/vulkan/sampler_descriptor.h"
30 
31 namespace amber {
32 namespace vulkan {
namespace {

// Shader entry point used when a stage does not specify one explicitly
// (see Pipeline::GetEntryPointName).
const char* kDefaultEntryPointName = "main";

}  // namespace
38 
// Constructs a pipeline of the given |type| (graphics or compute).
// |device| is borrowed, not owned. |shader_stage_info| is copied into
// shader_stage_info_ for later pipeline creation, and |fence_timeout_ms|
// bounds how long command-buffer submissions are waited on.
Pipeline::Pipeline(
    PipelineType type,
    Device* device,
    uint32_t fence_timeout_ms,
    const std::vector<VkPipelineShaderStageCreateInfo>& shader_stage_info)
    : device_(device),
      pipeline_type_(type),
      shader_stage_info_(shader_stage_info),
      fence_timeout_ms_(fence_timeout_ms) {}
48 
~Pipeline()49 Pipeline::~Pipeline() {
50   // Command must be reset before we destroy descriptors or we get a validation
51   // error.
52   command_ = nullptr;
53 
54   for (auto& info : descriptor_set_info_) {
55     if (info.layout != VK_NULL_HANDLE) {
56       device_->GetPtrs()->vkDestroyDescriptorSetLayout(device_->GetVkDevice(),
57                                                        info.layout, nullptr);
58     }
59 
60     if (info.empty)
61       continue;
62 
63     if (info.pool != VK_NULL_HANDLE) {
64       device_->GetPtrs()->vkDestroyDescriptorPool(device_->GetVkDevice(),
65                                                   info.pool, nullptr);
66     }
67   }
68 }
69 
AsGraphics()70 GraphicsPipeline* Pipeline::AsGraphics() {
71   return static_cast<GraphicsPipeline*>(this);
72 }
73 
AsCompute()74 ComputePipeline* Pipeline::AsCompute() {
75   return static_cast<ComputePipeline*>(this);
76 }
77 
Initialize(CommandPool * pool)78 Result Pipeline::Initialize(CommandPool* pool) {
79   push_constant_ = MakeUnique<PushConstant>(device_);
80 
81   command_ = MakeUnique<CommandBuffer>(device_, pool);
82   return command_->Initialize();
83 }
84 
CreateDescriptorSetLayouts()85 Result Pipeline::CreateDescriptorSetLayouts() {
86   for (auto& info : descriptor_set_info_) {
87     VkDescriptorSetLayoutCreateInfo desc_info =
88         VkDescriptorSetLayoutCreateInfo();
89     desc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
90 
91     // If there are no descriptors for this descriptor set we only
92     // need to create its layout and there will be no bindings.
93     std::vector<VkDescriptorSetLayoutBinding> bindings;
94     for (auto& desc : info.descriptors) {
95       bindings.emplace_back();
96       bindings.back().binding = desc->GetBinding();
97       bindings.back().descriptorType = desc->GetVkDescriptorType();
98       bindings.back().descriptorCount = desc->GetDescriptorCount();
99       bindings.back().stageFlags = VK_SHADER_STAGE_ALL;
100     }
101     desc_info.bindingCount = static_cast<uint32_t>(bindings.size());
102     desc_info.pBindings = bindings.data();
103 
104     if (device_->GetPtrs()->vkCreateDescriptorSetLayout(
105             device_->GetVkDevice(), &desc_info, nullptr, &info.layout) !=
106         VK_SUCCESS) {
107       return Result("Vulkan::Calling vkCreateDescriptorSetLayout Fail");
108     }
109   }
110 
111   return {};
112 }
113 
CreateDescriptorPools()114 Result Pipeline::CreateDescriptorPools() {
115   for (auto& info : descriptor_set_info_) {
116     if (info.empty)
117       continue;
118 
119     std::vector<VkDescriptorPoolSize> pool_sizes;
120     for (auto& desc : info.descriptors) {
121       VkDescriptorType type = desc->GetVkDescriptorType();
122       auto it = find_if(pool_sizes.begin(), pool_sizes.end(),
123                         [&type](const VkDescriptorPoolSize& size) {
124                           return size.type == type;
125                         });
126       if (it != pool_sizes.end()) {
127         it->descriptorCount += desc->GetDescriptorCount();
128         continue;
129       }
130 
131       pool_sizes.emplace_back();
132       pool_sizes.back().type = type;
133       pool_sizes.back().descriptorCount = desc->GetDescriptorCount();
134     }
135 
136     VkDescriptorPoolCreateInfo pool_info = VkDescriptorPoolCreateInfo();
137     pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
138     pool_info.maxSets = 1;
139     pool_info.poolSizeCount = static_cast<uint32_t>(pool_sizes.size());
140     pool_info.pPoolSizes = pool_sizes.data();
141 
142     if (device_->GetPtrs()->vkCreateDescriptorPool(device_->GetVkDevice(),
143                                                    &pool_info, nullptr,
144                                                    &info.pool) != VK_SUCCESS) {
145       return Result("Vulkan::Calling vkCreateDescriptorPool Fail");
146     }
147   }
148 
149   return {};
150 }
151 
CreateDescriptorSets()152 Result Pipeline::CreateDescriptorSets() {
153   for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
154     if (descriptor_set_info_[i].empty)
155       continue;
156 
157     VkDescriptorSetAllocateInfo desc_set_info = VkDescriptorSetAllocateInfo();
158     desc_set_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
159     desc_set_info.descriptorPool = descriptor_set_info_[i].pool;
160     desc_set_info.descriptorSetCount = 1;
161     desc_set_info.pSetLayouts = &descriptor_set_info_[i].layout;
162 
163     VkDescriptorSet desc_set = VK_NULL_HANDLE;
164     if (device_->GetPtrs()->vkAllocateDescriptorSets(
165             device_->GetVkDevice(), &desc_set_info, &desc_set) != VK_SUCCESS) {
166       return Result("Vulkan::Calling vkAllocateDescriptorSets Fail");
167     }
168     descriptor_set_info_[i].vk_desc_set = desc_set;
169   }
170 
171   return {};
172 }
173 
CreateVkPipelineLayout(VkPipelineLayout * pipeline_layout)174 Result Pipeline::CreateVkPipelineLayout(VkPipelineLayout* pipeline_layout) {
175   Result r = CreateVkDescriptorRelatedObjectsIfNeeded();
176   if (!r.IsSuccess())
177     return r;
178 
179   std::vector<VkDescriptorSetLayout> descriptor_set_layouts;
180   for (const auto& desc_set : descriptor_set_info_)
181     descriptor_set_layouts.push_back(desc_set.layout);
182 
183   VkPipelineLayoutCreateInfo pipeline_layout_info =
184       VkPipelineLayoutCreateInfo();
185   pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
186   pipeline_layout_info.setLayoutCount =
187       static_cast<uint32_t>(descriptor_set_layouts.size());
188   pipeline_layout_info.pSetLayouts = descriptor_set_layouts.data();
189 
190   VkPushConstantRange push_const_range =
191       push_constant_->GetVkPushConstantRange();
192   if (push_const_range.size > 0) {
193     pipeline_layout_info.pushConstantRangeCount = 1U;
194     pipeline_layout_info.pPushConstantRanges = &push_const_range;
195   }
196 
197   if (device_->GetPtrs()->vkCreatePipelineLayout(
198           device_->GetVkDevice(), &pipeline_layout_info, nullptr,
199           pipeline_layout) != VK_SUCCESS) {
200     return Result("Vulkan::Calling vkCreatePipelineLayout Fail");
201   }
202 
203   return {};
204 }
205 
CreateVkDescriptorRelatedObjectsIfNeeded()206 Result Pipeline::CreateVkDescriptorRelatedObjectsIfNeeded() {
207   if (descriptor_related_objects_already_created_)
208     return {};
209 
210   Result r = CreateDescriptorSetLayouts();
211   if (!r.IsSuccess())
212     return r;
213 
214   r = CreateDescriptorPools();
215   if (!r.IsSuccess())
216     return r;
217 
218   r = CreateDescriptorSets();
219   if (!r.IsSuccess())
220     return r;
221 
222   descriptor_related_objects_already_created_ = true;
223   return {};
224 }
225 
UpdateDescriptorSetsIfNeeded()226 void Pipeline::UpdateDescriptorSetsIfNeeded() {
227   for (auto& info : descriptor_set_info_) {
228     for (auto& desc : info.descriptors)
229       desc->UpdateDescriptorSetIfNeeded(info.vk_desc_set);
230   }
231 }
232 
RecordPushConstant(const VkPipelineLayout & pipeline_layout)233 Result Pipeline::RecordPushConstant(const VkPipelineLayout& pipeline_layout) {
234   return push_constant_->RecordPushConstantVkCommand(command_.get(),
235                                                      pipeline_layout);
236 }
237 
AddPushConstantBuffer(const Buffer * buf,uint32_t offset)238 Result Pipeline::AddPushConstantBuffer(const Buffer* buf, uint32_t offset) {
239   if (!buf)
240     return Result("Missing push constant buffer data");
241   return push_constant_->AddBuffer(buf, offset);
242 }
243 
GetDescriptorSlot(uint32_t desc_set,uint32_t binding,Descriptor ** desc)244 Result Pipeline::GetDescriptorSlot(uint32_t desc_set,
245                                    uint32_t binding,
246                                    Descriptor** desc) {
247   *desc = nullptr;
248 
249   if (desc_set >= descriptor_set_info_.size()) {
250     for (size_t i = descriptor_set_info_.size();
251          i <= static_cast<size_t>(desc_set); ++i) {
252       descriptor_set_info_.emplace_back();
253     }
254   }
255 
256   if (descriptor_set_info_[desc_set].empty &&
257       descriptor_related_objects_already_created_) {
258     return Result(
259         "Vulkan: Pipeline descriptor related objects were already created but "
260         "try to put data on empty descriptor set '" +
261         std::to_string(desc_set) +
262         "'. Note that all used descriptor sets must be allocated before the "
263         "first compute or draw.");
264   }
265   descriptor_set_info_[desc_set].empty = false;
266 
267   auto& descriptors = descriptor_set_info_[desc_set].descriptors;
268   for (auto& descriptor : descriptors) {
269     if (descriptor->GetBinding() == binding)
270       *desc = descriptor.get();
271   }
272 
273   return {};
274 }
275 
// Registers the buffer referenced by |cmd| at the command's descriptor
// set/binding slot. Creates a new image or buffer descriptor when the slot
// is unused; otherwise appends the buffer to the existing descriptor after
// checking the descriptor types match. Returns a failing Result for a null
// command, an unsupported buffer type, a type mismatch, a duplicate buffer,
// or an SSBO/UBO command aimed at a slot of the wrong kind.
Result Pipeline::AddBufferDescriptor(const BufferCommand* cmd) {
  if (cmd == nullptr)
    return Result("Pipeline::AddBufferDescriptor BufferCommand is nullptr");
  // Only buffer-backed descriptor kinds are handled here; plain samplers go
  // through AddSamplerDescriptor.
  if (!cmd->IsSSBO() && !cmd->IsUniform() && !cmd->IsStorageImage() &&
      !cmd->IsSampledImage() && !cmd->IsCombinedImageSampler() &&
      !cmd->IsUniformTexelBuffer() && !cmd->IsStorageTexelBuffer() &&
      !cmd->IsUniformDynamic() && !cmd->IsSSBODynamic()) {
    return Result("Pipeline::AddBufferDescriptor not supported buffer type");
  }

  // |desc| is the existing descriptor at this slot, or nullptr if none.
  Descriptor* desc;
  Result r =
      GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
  if (!r.IsSuccess())
    return r;

  auto& descriptors = descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;

  // Map the command's buffer kind onto a DescriptorType and note whether it
  // needs an image-backed descriptor. Plain uniform buffers fall through to
  // the kUniformBuffer default.
  bool is_image = false;
  DescriptorType desc_type = DescriptorType::kUniformBuffer;

  if (cmd->IsStorageImage()) {
    desc_type = DescriptorType::kStorageImage;
    is_image = true;
  } else if (cmd->IsSampledImage()) {
    desc_type = DescriptorType::kSampledImage;
    is_image = true;
  } else if (cmd->IsCombinedImageSampler()) {
    desc_type = DescriptorType::kCombinedImageSampler;
    is_image = true;
  } else if (cmd->IsUniformTexelBuffer()) {
    desc_type = DescriptorType::kUniformTexelBuffer;
  } else if (cmd->IsStorageTexelBuffer()) {
    desc_type = DescriptorType::kStorageTexelBuffer;
  } else if (cmd->IsSSBO()) {
    desc_type = DescriptorType::kStorageBuffer;
  } else if (cmd->IsUniformDynamic()) {
    desc_type = DescriptorType::kUniformBufferDynamic;
  } else if (cmd->IsSSBODynamic()) {
    desc_type = DescriptorType::kStorageBufferDynamic;
  }

  if (desc == nullptr) {
    // First buffer at this slot: create the appropriate descriptor object.
    if (is_image) {
      auto image_desc = MakeUnique<ImageDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetBaseMipLevel(),
          cmd->GetDescriptorSet(), cmd->GetBinding());
      if (cmd->IsCombinedImageSampler())
        image_desc->SetAmberSampler(cmd->GetSampler());
      descriptors.push_back(std::move(image_desc));
    } else {
      auto buffer_desc = MakeUnique<BufferDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetDescriptorSet(),
          cmd->GetBinding());
      descriptors.push_back(std::move(buffer_desc));
    }
    desc = descriptors.back().get();
  } else {
    // Slot already occupied: the new buffer must match the slot's type and
    // must not already be attached.
    if (desc->GetDescriptorType() != desc_type) {
      return Result(
          "Descriptors bound to the same binding needs to have matching "
          "descriptor types");
    }
    // Check that the buffer is not added already.
    const auto& buffers = desc->AsBufferBackedDescriptor()->GetAmberBuffers();
    if (std::find(buffers.begin(), buffers.end(), cmd->GetBuffer()) !=
        buffers.end()) {
      return Result("Buffer has been added already");
    }
    desc->AsBufferBackedDescriptor()->AddAmberBuffer(cmd->GetBuffer());
  }

  // Dynamic descriptors additionally record their per-draw offset.
  if (cmd->IsUniformDynamic() || cmd->IsSSBODynamic())
    desc->AsBufferDescriptor()->AddDynamicOffset(cmd->GetDynamicOffset());

  // Final sanity checks: an SSBO command must land on a storage-buffer slot
  // and a UBO command on a uniform-buffer slot.
  if (cmd->IsSSBO() && !desc->IsStorageBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for SSBO uses wrong "
        "descriptor "
        "set and binding");
  }

  if (cmd->IsUniform() && !desc->IsUniformBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for UBO uses wrong "
        "descriptor set "
        "and binding");
  }

  return {};
}
367 
AddSamplerDescriptor(const SamplerCommand * cmd)368 Result Pipeline::AddSamplerDescriptor(const SamplerCommand* cmd) {
369   if (cmd == nullptr)
370     return Result("Pipeline::AddSamplerDescriptor SamplerCommand is nullptr");
371 
372   Descriptor* desc;
373   Result r =
374       GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
375   if (!r.IsSuccess())
376     return r;
377 
378   auto& descriptors = descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;
379 
380   if (desc == nullptr) {
381     auto sampler_desc = MakeUnique<SamplerDescriptor>(
382         cmd->GetSampler(), DescriptorType::kSampler, device_,
383         cmd->GetDescriptorSet(), cmd->GetBinding());
384     descriptors.push_back(std::move(sampler_desc));
385   } else {
386     if (desc->GetDescriptorType() != DescriptorType::kSampler) {
387       return Result(
388           "Descriptors bound to the same binding needs to have matching "
389           "descriptor types");
390     }
391     desc->AsSamplerDescriptor()->AddAmberSampler(cmd->GetSampler());
392   }
393 
394   return {};
395 }
396 
// Ensures every descriptor has a device resource and that its host-side
// data has been transferred to the device. Uses two separate command-buffer
// submissions: the first creates/resizes resources, the second records the
// host-to-device copies.
Result Pipeline::SendDescriptorDataToDeviceIfNeeded() {
  {
    // First submission: create (or resize) the backing resources.
    CommandBufferGuard guard(GetCommandBuffer());
    if (!guard.IsRecording())
      return guard.GetResult();

    for (auto& info : descriptor_set_info_) {
      for (auto& desc : info.descriptors) {
        Result r = desc->CreateResourceIfNeeded();
        if (!r.IsSuccess())
          return r;
      }
    }

    // Note that if a buffer for a descriptor is host accessible and
    // does not need to record a command to copy data to device, it
    // directly writes data to the buffer. The direct write must be
    // done after resizing backed buffer i.e., copying data to the new
    // buffer from the old one. Thus, we must submit commands here to
    // guarantee this.
    Result r = guard.Submit(GetFenceTimeout());
    if (!r.IsSuccess())
      return r;
  }

  // Second submission: record and submit the copy-to-device commands, which
  // must only run once the resources above are finalized.
  CommandBufferGuard guard(GetCommandBuffer());
  if (!guard.IsRecording())
    return guard.GetResult();

  for (auto& info : descriptor_set_info_) {
    for (auto& desc : info.descriptors) {
      Result r = desc->RecordCopyDataToResourceIfNeeded(command_.get());
      if (!r.IsSuccess())
        return r;
    }
  }
  return guard.Submit(GetFenceTimeout());
}
435 
BindVkDescriptorSets(const VkPipelineLayout & pipeline_layout)436 void Pipeline::BindVkDescriptorSets(const VkPipelineLayout& pipeline_layout) {
437   for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
438     if (descriptor_set_info_[i].empty)
439       continue;
440 
441     // Sort descriptors by binding number to get correct order of dynamic
442     // offsets.
443     typedef std::pair<uint32_t, std::vector<uint32_t>> binding_offsets_pair;
444     std::vector<binding_offsets_pair> binding_offsets;
445     for (const auto& desc : descriptor_set_info_[i].descriptors) {
446       binding_offsets.push_back(
447           {desc->GetBinding(), desc->GetDynamicOffsets()});
448     }
449 
450     std::sort(std::begin(binding_offsets), std::end(binding_offsets),
451               [](const binding_offsets_pair& a, const binding_offsets_pair& b) {
452                 return a.first < b.first;
453               });
454 
455     // Add the sorted dynamic offsets.
456     std::vector<uint32_t> dynamic_offsets;
457     for (const auto& binding_offset : binding_offsets) {
458       for (auto offset : binding_offset.second) {
459         dynamic_offsets.push_back(offset);
460       }
461     }
462 
463     device_->GetPtrs()->vkCmdBindDescriptorSets(
464         command_->GetVkCommandBuffer(),
465         IsGraphics() ? VK_PIPELINE_BIND_POINT_GRAPHICS
466                      : VK_PIPELINE_BIND_POINT_COMPUTE,
467         pipeline_layout, static_cast<uint32_t>(i), 1,
468         &descriptor_set_info_[i].vk_desc_set,
469         static_cast<uint32_t>(dynamic_offsets.size()), dynamic_offsets.data());
470   }
471 }
472 
ReadbackDescriptorsToHostDataQueue()473 Result Pipeline::ReadbackDescriptorsToHostDataQueue() {
474   {
475     CommandBufferGuard guard(GetCommandBuffer());
476     if (!guard.IsRecording())
477       return guard.GetResult();
478 
479     for (auto& desc_set : descriptor_set_info_) {
480       for (auto& desc : desc_set.descriptors)
481         desc->RecordCopyDataToHost(command_.get());
482     }
483 
484     Result r = guard.Submit(GetFenceTimeout());
485     if (!r.IsSuccess())
486       return r;
487   }
488 
489   for (auto& desc_set : descriptor_set_info_) {
490     for (auto& desc : desc_set.descriptors) {
491       Result r = desc->MoveResourceToBufferOutput();
492       if (!r.IsSuccess())
493         return r;
494     }
495   }
496 
497   return {};
498 }
499 
GetEntryPointName(VkShaderStageFlagBits stage) const500 const char* Pipeline::GetEntryPointName(VkShaderStageFlagBits stage) const {
501   auto it = entry_points_.find(stage);
502   if (it != entry_points_.end())
503     return it->second.c_str();
504 
505   return kDefaultEntryPointName;
506 }
507 
508 }  // namespace vulkan
509 }  // namespace amber
510