// Copyright 2018 The Amber Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "src/vulkan/pipeline.h"

#include <algorithm>
#include <limits>
#include <utility>

#include "src/command.h"
#include "src/engine.h"
#include "src/make_unique.h"
#include "src/vulkan/buffer_descriptor.h"
#include "src/vulkan/compute_pipeline.h"
#include "src/vulkan/device.h"
#include "src/vulkan/graphics_pipeline.h"
#include "src/vulkan/image_descriptor.h"
#include "src/vulkan/sampler_descriptor.h"

namespace amber {
namespace vulkan {
namespace {

const char* kDefaultEntryPointName = "main";

}  // namespace

Pipeline::Pipeline(
    PipelineType type,
    Device* device,
    uint32_t fence_timeout_ms,
    const std::vector<VkPipelineShaderStageCreateInfo>& shader_stage_info)
    : device_(device),
      pipeline_type_(type),
      shader_stage_info_(shader_stage_info),
      fence_timeout_ms_(fence_timeout_ms) {}

Pipeline::~Pipeline() {
  // Command must be reset before we destroy descriptors or we get a validation
  // error.
  command_ = nullptr;

  for (auto& info : descriptor_set_info_) {
    if (info.layout != VK_NULL_HANDLE) {
      device_->GetPtrs()->vkDestroyDescriptorSetLayout(device_->GetVkDevice(),
                                                       info.layout, nullptr);
    }

    if (info.empty)
      continue;

    if (info.pool != VK_NULL_HANDLE) {
      device_->GetPtrs()->vkDestroyDescriptorPool(device_->GetVkDevice(),
                                                  info.pool, nullptr);
    }
  }
}

GraphicsPipeline* Pipeline::AsGraphics() {
  return static_cast<GraphicsPipeline*>(this);
}

ComputePipeline* Pipeline::AsCompute() {
  return static_cast<ComputePipeline*>(this);
}

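// Sets up the per-pipeline helpers: the push constant tracker and a command
// buffer allocated from |pool|.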
Result Pipeline::Initialize(CommandPool* pool) {
  push_constant_ = MakeUnique<PushConstant>(device_);

  command_ = MakeUnique<CommandBuffer>(device_, pool);
  return command_->Initialize();
}

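// Creates a VkDescriptorSetLayout for every descriptor set, with one binding
// per descriptor. Bindings use VK_SHADER_STAGE_ALL so the same layout works
// regardless of which shader stages access the descriptor.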
Result Pipeline::CreateDescriptorSetLayouts() {
  for (auto& info : descriptor_set_info_) {
    VkDescriptorSetLayoutCreateInfo desc_info =
        VkDescriptorSetLayoutCreateInfo();
    desc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

    // If there are no descriptors for this descriptor set, we still create
    // its layout; it will simply have no bindings.
    std::vector<VkDescriptorSetLayoutBinding> bindings;
    for (auto& desc : info.descriptors) {
      bindings.emplace_back();
      bindings.back().binding = desc->GetBinding();
      bindings.back().descriptorType = desc->GetVkDescriptorType();
      bindings.back().descriptorCount = desc->GetDescriptorCount();
      bindings.back().stageFlags = VK_SHADER_STAGE_ALL;
    }
    desc_info.bindingCount = static_cast<uint32_t>(bindings.size());
    desc_info.pBindings = bindings.data();

    if (device_->GetPtrs()->vkCreateDescriptorSetLayout(
            device_->GetVkDevice(), &desc_info, nullptr, &info.layout) !=
        VK_SUCCESS) {
      return Result("Vulkan::Calling vkCreateDescriptorSetLayout Fail");
    }
  }

  return {};
}

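// Creates a descriptor pool for each non-empty descriptor set. Pool sizes
// are accumulated per VkDescriptorType, so each pool holds exactly the
// descriptors needed by the single set allocated from it.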
Result Pipeline::CreateDescriptorPools() {
  for (auto& info : descriptor_set_info_) {
    if (info.empty)
      continue;

    std::vector<VkDescriptorPoolSize> pool_sizes;
    for (auto& desc : info.descriptors) {
      VkDescriptorType type = desc->GetVkDescriptorType();
      auto it = std::find_if(pool_sizes.begin(), pool_sizes.end(),
                             [&type](const VkDescriptorPoolSize& size) {
                               return size.type == type;
                             });
      if (it != pool_sizes.end()) {
        it->descriptorCount += desc->GetDescriptorCount();
        continue;
      }

      pool_sizes.emplace_back();
      pool_sizes.back().type = type;
      pool_sizes.back().descriptorCount = desc->GetDescriptorCount();
    }

    VkDescriptorPoolCreateInfo pool_info = VkDescriptorPoolCreateInfo();
    pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    pool_info.maxSets = 1;
    pool_info.poolSizeCount = static_cast<uint32_t>(pool_sizes.size());
    pool_info.pPoolSizes = pool_sizes.data();

    if (device_->GetPtrs()->vkCreateDescriptorPool(device_->GetVkDevice(),
                                                   &pool_info, nullptr,
                                                   &info.pool) != VK_SUCCESS) {
      return Result("Vulkan::Calling vkCreateDescriptorPool Fail");
    }
  }

  return {};
}

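// Allocates one VkDescriptorSet per non-empty descriptor set from the pool
// and layout created above.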
Result Pipeline::CreateDescriptorSets() {
  for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
    if (descriptor_set_info_[i].empty)
      continue;

    VkDescriptorSetAllocateInfo desc_set_info = VkDescriptorSetAllocateInfo();
    desc_set_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    desc_set_info.descriptorPool = descriptor_set_info_[i].pool;
    desc_set_info.descriptorSetCount = 1;
    desc_set_info.pSetLayouts = &descriptor_set_info_[i].layout;

    VkDescriptorSet desc_set = VK_NULL_HANDLE;
    if (device_->GetPtrs()->vkAllocateDescriptorSets(
            device_->GetVkDevice(), &desc_set_info, &desc_set) != VK_SUCCESS) {
      return Result("Vulkan::Calling vkAllocateDescriptorSets Fail");
    }
    descriptor_set_info_[i].vk_desc_set = desc_set;
  }

  return {};
}

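// Creates the VkPipelineLayout covering all descriptor set layouts and, when
// push constant data has been added, a single push constant range.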
Result Pipeline::CreateVkPipelineLayout(VkPipelineLayout* pipeline_layout) {
  Result r = CreateVkDescriptorRelatedObjectsIfNeeded();
  if (!r.IsSuccess())
    return r;

  std::vector<VkDescriptorSetLayout> descriptor_set_layouts;
  for (const auto& desc_set : descriptor_set_info_)
    descriptor_set_layouts.push_back(desc_set.layout);

  VkPipelineLayoutCreateInfo pipeline_layout_info =
      VkPipelineLayoutCreateInfo();
  pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  pipeline_layout_info.setLayoutCount =
      static_cast<uint32_t>(descriptor_set_layouts.size());
  pipeline_layout_info.pSetLayouts = descriptor_set_layouts.data();

  VkPushConstantRange push_const_range =
      push_constant_->GetVkPushConstantRange();
  if (push_const_range.size > 0) {
    pipeline_layout_info.pushConstantRangeCount = 1U;
    pipeline_layout_info.pPushConstantRanges = &push_const_range;
  }

  if (device_->GetPtrs()->vkCreatePipelineLayout(
          device_->GetVkDevice(), &pipeline_layout_info, nullptr,
          pipeline_layout) != VK_SUCCESS) {
    return Result("Vulkan::Calling vkCreatePipelineLayout Fail");
  }

  return {};
}

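// Creates the descriptor set layouts, pools, and sets on first use; later
// calls are no-ops, so these objects are fixed after the first compute or
// draw.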
Result Pipeline::CreateVkDescriptorRelatedObjectsIfNeeded() {
  if (descriptor_related_objects_already_created_)
    return {};

  Result r = CreateDescriptorSetLayouts();
  if (!r.IsSuccess())
    return r;

  r = CreateDescriptorPools();
  if (!r.IsSuccess())
    return r;

  r = CreateDescriptorSets();
  if (!r.IsSuccess())
    return r;

  descriptor_related_objects_already_created_ = true;
  return {};
}

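// Gives every descriptor a chance to (re)write its resource into its
// VkDescriptorSet.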
void Pipeline::UpdateDescriptorSetsIfNeeded() {
  for (auto& info : descriptor_set_info_) {
    for (auto& desc : info.descriptors)
      desc->UpdateDescriptorSetIfNeeded(info.vk_desc_set);
  }
}

Result Pipeline::RecordPushConstant(const VkPipelineLayout& pipeline_layout) {
  return push_constant_->RecordPushConstantVkCommand(command_.get(),
                                                     pipeline_layout);
}

Result Pipeline::AddPushConstantBuffer(const Buffer* buf, uint32_t offset) {
  if (!buf)
    return Result("Missing push constant buffer data");
  return push_constant_->AddBuffer(buf, offset);
}

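// Looks up the descriptor at |desc_set|/|binding| and returns it through
// |desc|, or nullptr if none exists yet. Grows descriptor_set_info_ on
// demand and marks the set as used, which is an error once the Vulkan
// descriptor objects have already been created.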
Result Pipeline::GetDescriptorSlot(uint32_t desc_set,
                                   uint32_t binding,
                                   Descriptor** desc) {
  *desc = nullptr;

  if (desc_set >= descriptor_set_info_.size()) {
    for (size_t i = descriptor_set_info_.size();
         i <= static_cast<size_t>(desc_set); ++i) {
      descriptor_set_info_.emplace_back();
    }
  }

  if (descriptor_set_info_[desc_set].empty &&
      descriptor_related_objects_already_created_) {
    return Result(
        "Vulkan: Pipeline descriptor related objects were already created, "
        "but an attempt was made to put data in the empty descriptor set '" +
        std::to_string(desc_set) +
        "'. Note that all used descriptor sets must be allocated before the "
        "first compute or draw.");
  }
  descriptor_set_info_[desc_set].empty = false;

  auto& descriptors = descriptor_set_info_[desc_set].descriptors;
  for (auto& descriptor : descriptors) {
    if (descriptor->GetBinding() == binding)
      *desc = descriptor.get();
  }

  return {};
}

Result Pipeline::AddDescriptorBuffer(Buffer* amber_buffer) {
  // Don't add the buffer if it has already been added.
  const auto& buffer = std::find_if(
      descriptor_buffers_.begin(), descriptor_buffers_.end(),
      [&](const Buffer* buf) { return buf == amber_buffer; });
  if (buffer != descriptor_buffers_.end()) {
    return {};
  }
  descriptor_buffers_.push_back(amber_buffer);
  return {};
}

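// Registers the buffer-backed descriptor described by |cmd| (UBO, SSBO,
// image, texel buffer, or a dynamic variant). An unused set/binding pair
// gets a new descriptor; otherwise the buffer is appended to the existing
// descriptor, whose type must match.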
Result Pipeline::AddBufferDescriptor(const BufferCommand* cmd) {
  if (cmd == nullptr)
    return Result("Pipeline::AddBufferDescriptor BufferCommand is nullptr");
  if (!cmd->IsSSBO() && !cmd->IsUniform() && !cmd->IsStorageImage() &&
      !cmd->IsSampledImage() && !cmd->IsCombinedImageSampler() &&
      !cmd->IsUniformTexelBuffer() && !cmd->IsStorageTexelBuffer() &&
      !cmd->IsUniformDynamic() && !cmd->IsSSBODynamic()) {
    return Result("Pipeline::AddBufferDescriptor unsupported buffer type");
  }

  Descriptor* desc;
  Result r =
      GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
  if (!r.IsSuccess())
    return r;

  auto& descriptors =
      descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;

  bool is_image = false;
  DescriptorType desc_type = DescriptorType::kUniformBuffer;

  if (cmd->IsStorageImage()) {
    desc_type = DescriptorType::kStorageImage;
    is_image = true;
  } else if (cmd->IsSampledImage()) {
    desc_type = DescriptorType::kSampledImage;
    is_image = true;
  } else if (cmd->IsCombinedImageSampler()) {
    desc_type = DescriptorType::kCombinedImageSampler;
    is_image = true;
  } else if (cmd->IsUniformTexelBuffer()) {
    desc_type = DescriptorType::kUniformTexelBuffer;
  } else if (cmd->IsStorageTexelBuffer()) {
    desc_type = DescriptorType::kStorageTexelBuffer;
  } else if (cmd->IsSSBO()) {
    desc_type = DescriptorType::kStorageBuffer;
  } else if (cmd->IsUniformDynamic()) {
    desc_type = DescriptorType::kUniformBufferDynamic;
  } else if (cmd->IsSSBODynamic()) {
    desc_type = DescriptorType::kStorageBufferDynamic;
  }

  if (desc == nullptr) {
    if (is_image) {
      auto image_desc = MakeUnique<ImageDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetBaseMipLevel(),
          cmd->GetDescriptorSet(), cmd->GetBinding(), this);
      if (cmd->IsCombinedImageSampler())
        image_desc->SetAmberSampler(cmd->GetSampler());

      descriptors.push_back(std::move(image_desc));
    } else {
      auto buffer_desc = MakeUnique<BufferDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetDescriptorSet(),
          cmd->GetBinding(), this);
      descriptors.push_back(std::move(buffer_desc));
    }
    AddDescriptorBuffer(cmd->GetBuffer());
    desc = descriptors.back().get();
  } else {
    if (desc->GetDescriptorType() != desc_type) {
      return Result(
          "Descriptors bound to the same binding need to have matching "
          "descriptor types");
    }
    desc->AsBufferBackedDescriptor()->AddAmberBuffer(cmd->GetBuffer());
    AddDescriptorBuffer(cmd->GetBuffer());
  }

  if (cmd->IsUniformDynamic() || cmd->IsSSBODynamic())
    desc->AsBufferDescriptor()->AddDynamicOffset(cmd->GetDynamicOffset());

  if (cmd->IsUniform() || cmd->IsUniformDynamic() || cmd->IsSSBO() ||
      cmd->IsSSBODynamic()) {
    desc->AsBufferDescriptor()->AddDescriptorOffset(cmd->GetDescriptorOffset());
    desc->AsBufferDescriptor()->AddDescriptorRange(cmd->GetDescriptorRange());
  }

  if (cmd->IsSSBO() && !desc->IsStorageBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for SSBO uses wrong "
        "descriptor set and binding");
  }

  if (cmd->IsUniform() && !desc->IsUniformBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for UBO uses wrong "
        "descriptor set and binding");
  }

  return {};
}

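// Registers the sampler descriptor described by |cmd|, either creating a new
// descriptor or appending the sampler to the one already bound there.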
Result Pipeline::AddSamplerDescriptor(const SamplerCommand* cmd) {
  if (cmd == nullptr)
    return Result("Pipeline::AddSamplerDescriptor SamplerCommand is nullptr");

  Descriptor* desc;
  Result r =
      GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
  if (!r.IsSuccess())
    return r;

  auto& descriptors =
      descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;

  if (desc == nullptr) {
    auto sampler_desc = MakeUnique<SamplerDescriptor>(
        cmd->GetSampler(), DescriptorType::kSampler, device_,
        cmd->GetDescriptorSet(), cmd->GetBinding());
    descriptors.push_back(std::move(sampler_desc));
  } else {
    if (desc->GetDescriptorType() != DescriptorType::kSampler) {
      return Result(
          "Descriptors bound to the same binding need to have matching "
          "descriptor types");
    }
    desc->AsSamplerDescriptor()->AddAmberSampler(cmd->GetSampler());
  }

  return {};
}

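// Creates the transfer resources backing each descriptor and copies the
// host-side buffer data into them. Two submissions are used: the first makes
// sure resource creation (and any resize) has finished, the second records
// the actual copies and image layout transitions.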
Result Pipeline::SendDescriptorDataToDeviceIfNeeded() {
  {
    CommandBufferGuard guard(GetCommandBuffer());
    if (!guard.IsRecording())
      return guard.GetResult();

    for (auto& info : descriptor_set_info_) {
      for (auto& desc : info.descriptors) {
        Result r = desc->CreateResourceIfNeeded();
        if (!r.IsSuccess())
          return r;
      }
    }

    // Initialize transfer buffers / images.
    for (auto buffer : descriptor_buffers_) {
      if (descriptor_transfer_resources_.count(buffer) == 0) {
        return Result(
            "Vulkan: Pipeline::SendDescriptorDataToDeviceIfNeeded() "
            "descriptor's transfer resource is not found");
      }
      descriptor_transfer_resources_[buffer]->Initialize();
    }

    // Note that if a buffer for a descriptor is host accessible and does not
    // need a command to copy its data to the device, the data is written to
    // the buffer directly. That direct write must happen after the backing
    // buffer has been resized, i.e., after the data has been copied from the
    // old buffer to the new one, so we must submit the commands here to
    // guarantee that ordering.
    Result r = guard.Submit(GetFenceTimeout());
    if (!r.IsSuccess())
      return r;
  }

  CommandBufferGuard guard(GetCommandBuffer());
  if (!guard.IsRecording())
    return guard.GetResult();

  // Copy descriptor data to transfer resources.
  for (auto& buffer : descriptor_buffers_) {
    if (auto transfer_buffer =
            descriptor_transfer_resources_[buffer]->AsTransferBuffer()) {
      BufferBackedDescriptor::RecordCopyBufferDataToTransferResourceIfNeeded(
          GetCommandBuffer(), buffer, transfer_buffer);
    } else if (auto transfer_image =
                   descriptor_transfer_resources_[buffer]->AsTransferImage()) {
      transfer_image->ImageBarrier(GetCommandBuffer(),
                                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT);

      BufferBackedDescriptor::RecordCopyBufferDataToTransferResourceIfNeeded(
          GetCommandBuffer(), buffer, transfer_image);

      transfer_image->ImageBarrier(GetCommandBuffer(), VK_IMAGE_LAYOUT_GENERAL,
                                   VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
    } else {
      return Result(
          "Vulkan: Pipeline::SendDescriptorDataToDeviceIfNeeded() "
          "this should be unreachable");
    }
  }
  return guard.Submit(GetFenceTimeout());
}

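// Binds each non-empty descriptor set at the pipeline's bind point, passing
// its dynamic offsets sorted by binding number.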
void Pipeline::BindVkDescriptorSets(const VkPipelineLayout& pipeline_layout) {
  for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
    if (descriptor_set_info_[i].empty)
      continue;

    // Sort descriptors by binding number to get the correct order of dynamic
    // offsets.
    typedef std::pair<uint32_t, std::vector<uint32_t>> binding_offsets_pair;
    std::vector<binding_offsets_pair> binding_offsets;
    for (const auto& desc : descriptor_set_info_[i].descriptors) {
      binding_offsets.push_back(
          {desc->GetBinding(), desc->GetDynamicOffsets()});
    }

    std::sort(
        std::begin(binding_offsets), std::end(binding_offsets),
        [](const binding_offsets_pair& a, const binding_offsets_pair& b) {
          return a.first < b.first;
        });

    // Add the sorted dynamic offsets.
    std::vector<uint32_t> dynamic_offsets;
    for (const auto& binding_offset : binding_offsets) {
      for (auto offset : binding_offset.second) {
        dynamic_offsets.push_back(offset);
      }
    }

    device_->GetPtrs()->vkCmdBindDescriptorSets(
        command_->GetVkCommandBuffer(),
        IsGraphics() ? VK_PIPELINE_BIND_POINT_GRAPHICS
                     : VK_PIPELINE_BIND_POINT_COMPUTE,
        pipeline_layout, static_cast<uint32_t>(i), 1,
        &descriptor_set_info_[i].vk_desc_set,
        static_cast<uint32_t>(dynamic_offsets.size()), dynamic_offsets.data());
  }
}

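// Reads descriptor data back from the device: records and submits copies
// from the transfer resources to host-visible memory, then moves the results
// into the Amber output buffers and drops the transfer resources.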
Result Pipeline::ReadbackDescriptorsToHostDataQueue() {
  // Record required commands to copy the data to a host visible buffer.
  {
    CommandBufferGuard guard(GetCommandBuffer());
    if (!guard.IsRecording())
      return guard.GetResult();

    for (auto& buffer : descriptor_buffers_) {
      if (descriptor_transfer_resources_.count(buffer) == 0) {
        return Result(
            "Vulkan: Pipeline::ReadbackDescriptorsToHostDataQueue() "
            "descriptor's transfer resource is not found");
      }
      if (auto transfer_buffer =
              descriptor_transfer_resources_[buffer]->AsTransferBuffer()) {
        Result r = BufferBackedDescriptor::RecordCopyTransferResourceToHost(
            GetCommandBuffer(), transfer_buffer);
        if (!r.IsSuccess())
          return r;
      } else if (auto transfer_image = descriptor_transfer_resources_[buffer]
                                           ->AsTransferImage()) {
        transfer_image->ImageBarrier(GetCommandBuffer(),
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     VK_PIPELINE_STAGE_TRANSFER_BIT);
        Result r = BufferBackedDescriptor::RecordCopyTransferResourceToHost(
            GetCommandBuffer(), transfer_image);
        if (!r.IsSuccess())
          return r;
      } else {
        return Result(
            "Vulkan: Pipeline::ReadbackDescriptorsToHostDataQueue() "
            "this should be unreachable");
      }
    }

    Result r = guard.Submit(GetFenceTimeout());
    if (!r.IsSuccess())
      return r;
  }

  // Move data from transfer buffers to output buffers.
  for (auto& buffer : descriptor_buffers_) {
    auto& transfer_resource = descriptor_transfer_resources_[buffer];
    Result r = BufferBackedDescriptor::MoveTransferResourceToBufferOutput(
        transfer_resource.get(), buffer);
    if (!r.IsSuccess())
      return r;
  }
  descriptor_transfer_resources_.clear();
  return {};
}

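// Returns the entry point name registered for |stage|, or "main" if none was
// set.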
const char* Pipeline::GetEntryPointName(VkShaderStageFlagBits stage) const {
  auto it = entry_points_.find(stage);
  if (it != entry_points_.end())
    return it->second.c_str();

  return kDefaultEntryPointName;
}

}  // namespace vulkan
}  // namespace amber