// Copyright 2018 The Amber Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "src/vulkan/pipeline.h"

#include <algorithm>
#include <limits>
#include <utility>

#include "src/command.h"
#include "src/engine.h"
#include "src/make_unique.h"
#include "src/vulkan/buffer_descriptor.h"
#include "src/vulkan/compute_pipeline.h"
#include "src/vulkan/device.h"
#include "src/vulkan/graphics_pipeline.h"
#include "src/vulkan/image_descriptor.h"
#include "src/vulkan/sampler_descriptor.h"

namespace amber {
namespace vulkan {
namespace {

const char* kDefaultEntryPointName = "main";

}  // namespace
Pipeline::Pipeline(
    PipelineType type,
    Device* device,
    uint32_t fence_timeout_ms,
    const std::vector<VkPipelineShaderStageCreateInfo>& shader_stage_info)
    : device_(device),
      pipeline_type_(type),
      shader_stage_info_(shader_stage_info),
      fence_timeout_ms_(fence_timeout_ms) {}

Pipeline::~Pipeline() {
  // The command buffer must be reset before the descriptors are destroyed,
  // or we get a validation error.
  command_ = nullptr;

  for (auto& info : descriptor_set_info_) {
    if (info.layout != VK_NULL_HANDLE) {
      device_->GetPtrs()->vkDestroyDescriptorSetLayout(device_->GetVkDevice(),
                                                       info.layout, nullptr);
    }

    if (info.empty)
      continue;

    if (info.pool != VK_NULL_HANDLE) {
      device_->GetPtrs()->vkDestroyDescriptorPool(device_->GetVkDevice(),
                                                  info.pool, nullptr);
    }
  }
}

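// Downcast helpers. These casts are unchecked; callers are expected to know
// the pipeline's concrete type (e.g. via pipeline_type_) before calling.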
GraphicsPipeline* Pipeline::AsGraphics() {
  return static_cast<GraphicsPipeline*>(this);
}

ComputePipeline* Pipeline::AsCompute() {
  return static_cast<ComputePipeline*>(this);
}

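// Creates the push-constant tracker and allocates the command buffer that
// this pipeline uses for all subsequent transfer, compute, and draw
// recording.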
Result Pipeline::Initialize(CommandPool* pool) {
  push_constant_ = MakeUnique<PushConstant>(device_);

  command_ = MakeUnique<CommandBuffer>(device_, pool);
  return command_->Initialize();
}

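// Creates one VkDescriptorSetLayout per entry in descriptor_set_info_. Every
// binding is made visible to all shader stages (VK_SHADER_STAGE_ALL), and a
// set with no descriptors still gets a (bindingless) layout so that every
// set index has a layout when the pipeline layout is built.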
Result Pipeline::CreateDescriptorSetLayouts() {
  for (auto& info : descriptor_set_info_) {
    VkDescriptorSetLayoutCreateInfo desc_info =
        VkDescriptorSetLayoutCreateInfo();
    desc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;

    // If there are no descriptors for this descriptor set, we only need to
    // create its layout; there will be no bindings.
    std::vector<VkDescriptorSetLayoutBinding> bindings;
    for (auto& desc : info.descriptors) {
      bindings.emplace_back();
      bindings.back().binding = desc->GetBinding();
      bindings.back().descriptorType = desc->GetVkDescriptorType();
      bindings.back().descriptorCount = desc->GetDescriptorCount();
      bindings.back().stageFlags = VK_SHADER_STAGE_ALL;
    }
    desc_info.bindingCount = static_cast<uint32_t>(bindings.size());
    desc_info.pBindings = bindings.data();

    if (device_->GetPtrs()->vkCreateDescriptorSetLayout(
            device_->GetVkDevice(), &desc_info, nullptr, &info.layout) !=
        VK_SUCCESS) {
      return Result("Vulkan::Calling vkCreateDescriptorSetLayout Fail");
    }
  }

  return {};
}

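// Creates a descriptor pool for each non-empty descriptor set. Pool sizes
// are aggregated by VkDescriptorType, so each pool holds exactly enough
// descriptors for the single set it backs (maxSets = 1).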
Result Pipeline::CreateDescriptorPools() {
  for (auto& info : descriptor_set_info_) {
    if (info.empty)
      continue;

    std::vector<VkDescriptorPoolSize> pool_sizes;
    for (auto& desc : info.descriptors) {
      VkDescriptorType type = desc->GetVkDescriptorType();
      auto it = std::find_if(pool_sizes.begin(), pool_sizes.end(),
                             [&type](const VkDescriptorPoolSize& size) {
                               return size.type == type;
                             });
      if (it != pool_sizes.end()) {
        it->descriptorCount += desc->GetDescriptorCount();
        continue;
      }

      pool_sizes.emplace_back();
      pool_sizes.back().type = type;
      pool_sizes.back().descriptorCount = desc->GetDescriptorCount();
    }

    VkDescriptorPoolCreateInfo pool_info = VkDescriptorPoolCreateInfo();
    pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    pool_info.maxSets = 1;
    pool_info.poolSizeCount = static_cast<uint32_t>(pool_sizes.size());
    pool_info.pPoolSizes = pool_sizes.data();

    if (device_->GetPtrs()->vkCreateDescriptorPool(device_->GetVkDevice(),
                                                   &pool_info, nullptr,
                                                   &info.pool) != VK_SUCCESS) {
      return Result("Vulkan::Calling vkCreateDescriptorPool Fail");
    }
  }

  return {};
}

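// Allocates one VkDescriptorSet from each non-empty set's pool, using the
// layout created in CreateDescriptorSetLayouts().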
Result Pipeline::CreateDescriptorSets() {
  for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
    if (descriptor_set_info_[i].empty)
      continue;

    VkDescriptorSetAllocateInfo desc_set_info = VkDescriptorSetAllocateInfo();
    desc_set_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    desc_set_info.descriptorPool = descriptor_set_info_[i].pool;
    desc_set_info.descriptorSetCount = 1;
    desc_set_info.pSetLayouts = &descriptor_set_info_[i].layout;

    VkDescriptorSet desc_set = VK_NULL_HANDLE;
    if (device_->GetPtrs()->vkAllocateDescriptorSets(
            device_->GetVkDevice(), &desc_set_info, &desc_set) != VK_SUCCESS) {
      return Result("Vulkan::Calling vkAllocateDescriptorSets Fail");
    }
    descriptor_set_info_[i].vk_desc_set = desc_set;
  }

  return {};
}

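// Builds the VkPipelineLayout from all of the descriptor set layouts plus,
// if any push-constant data was added, a single push-constant range covering
// that data.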
Result Pipeline::CreateVkPipelineLayout(VkPipelineLayout* pipeline_layout) {
  Result r = CreateVkDescriptorRelatedObjectsIfNeeded();
  if (!r.IsSuccess())
    return r;

  std::vector<VkDescriptorSetLayout> descriptor_set_layouts;
  for (const auto& desc_set : descriptor_set_info_)
    descriptor_set_layouts.push_back(desc_set.layout);

  VkPipelineLayoutCreateInfo pipeline_layout_info =
      VkPipelineLayoutCreateInfo();
  pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  pipeline_layout_info.setLayoutCount =
      static_cast<uint32_t>(descriptor_set_layouts.size());
  pipeline_layout_info.pSetLayouts = descriptor_set_layouts.data();

  VkPushConstantRange push_const_range =
      push_constant_->GetVkPushConstantRange();
  if (push_const_range.size > 0) {
    pipeline_layout_info.pushConstantRangeCount = 1U;
    pipeline_layout_info.pPushConstantRanges = &push_const_range;
  }

  if (device_->GetPtrs()->vkCreatePipelineLayout(
          device_->GetVkDevice(), &pipeline_layout_info, nullptr,
          pipeline_layout) != VK_SUCCESS) {
    return Result("Vulkan::Calling vkCreatePipelineLayout Fail");
  }

  return {};
}

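// Lazily creates the descriptor set layouts, pools, and sets exactly once;
// subsequent calls are no-ops. After this point, adding data to a previously
// empty descriptor set is an error (see GetDescriptorSlot()).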
Result Pipeline::CreateVkDescriptorRelatedObjectsIfNeeded() {
  if (descriptor_related_objects_already_created_)
    return {};

  Result r = CreateDescriptorSetLayouts();
  if (!r.IsSuccess())
    return r;

  r = CreateDescriptorPools();
  if (!r.IsSuccess())
    return r;

  r = CreateDescriptorSets();
  if (!r.IsSuccess())
    return r;

  descriptor_related_objects_already_created_ = true;
  return {};
}

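// Writes each descriptor into its VkDescriptorSet via the descriptor's
// UpdateDescriptorSetIfNeeded() hook, which skips descriptors that are
// already up to date.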
void Pipeline::UpdateDescriptorSetsIfNeeded() {
  for (auto& info : descriptor_set_info_) {
    for (auto& desc : info.descriptors)
      desc->UpdateDescriptorSetIfNeeded(info.vk_desc_set);
  }
}

Result Pipeline::RecordPushConstant(const VkPipelineLayout& pipeline_layout) {
  return push_constant_->RecordPushConstantVkCommand(command_.get(),
                                                     pipeline_layout);
}

Result Pipeline::AddPushConstantBuffer(const Buffer* buf, uint32_t offset) {
  if (!buf)
    return Result("Missing push constant buffer data");
  return push_constant_->AddBuffer(buf, offset);
}

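// Looks up the descriptor registered at (desc_set, binding) and returns it
// through |desc|, leaving it null if none exists yet. descriptor_set_info_
// is grown on demand so set numbers may be sparse, and the set is marked
// non-empty, which must happen before the Vulkan descriptor objects are
// created.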
Result Pipeline::GetDescriptorSlot(uint32_t desc_set,
                                   uint32_t binding,
                                   Descriptor** desc) {
  *desc = nullptr;

  if (desc_set >= descriptor_set_info_.size()) {
    for (size_t i = descriptor_set_info_.size();
         i <= static_cast<size_t>(desc_set); ++i) {
      descriptor_set_info_.emplace_back();
    }
  }

  if (descriptor_set_info_[desc_set].empty &&
      descriptor_related_objects_already_created_) {
    return Result(
        "Vulkan: Pipeline descriptor related objects were already created, "
        "but an attempt was made to put data in the empty descriptor set '" +
        std::to_string(desc_set) +
        "'. Note that all used descriptor sets must be allocated before the "
        "first compute or draw.");
  }
  descriptor_set_info_[desc_set].empty = false;

  auto& descriptors = descriptor_set_info_[desc_set].descriptors;
  for (auto& descriptor : descriptors) {
    if (descriptor->GetBinding() == binding)
      *desc = descriptor.get();
  }

  return {};
}

Result Pipeline::AddDescriptorBuffer(Buffer* amber_buffer) {
  // Don't add the buffer if it's already added.
  const auto& buffer = std::find_if(
      descriptor_buffers_.begin(), descriptor_buffers_.end(),
      [&](const Buffer* buf) { return buf == amber_buffer; });
  if (buffer != descriptor_buffers_.end()) {
    return {};
  }
  descriptor_buffers_.push_back(amber_buffer);
  return {};
}

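// Registers the buffer-backed descriptor (UBO, SSBO, texel buffer, or one of
// the image variants) described by |cmd|. The first command seen for a
// (set, binding) pair creates the descriptor; subsequent commands append
// their buffer to the existing descriptor, whose type must match.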
Result Pipeline::AddBufferDescriptor(const BufferCommand* cmd) {
  if (cmd == nullptr)
    return Result("Pipeline::AddBufferDescriptor BufferCommand is nullptr");
  if (!cmd->IsSSBO() && !cmd->IsUniform() && !cmd->IsStorageImage() &&
      !cmd->IsSampledImage() && !cmd->IsCombinedImageSampler() &&
      !cmd->IsUniformTexelBuffer() && !cmd->IsStorageTexelBuffer() &&
      !cmd->IsUniformDynamic() && !cmd->IsSSBODynamic()) {
    return Result("Pipeline::AddBufferDescriptor unsupported buffer type");
  }

  Descriptor* desc;
  Result r =
      GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
  if (!r.IsSuccess())
    return r;

  auto& descriptors =
      descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;

  bool is_image = false;
  DescriptorType desc_type = DescriptorType::kUniformBuffer;

  if (cmd->IsStorageImage()) {
    desc_type = DescriptorType::kStorageImage;
    is_image = true;
  } else if (cmd->IsSampledImage()) {
    desc_type = DescriptorType::kSampledImage;
    is_image = true;
  } else if (cmd->IsCombinedImageSampler()) {
    desc_type = DescriptorType::kCombinedImageSampler;
    is_image = true;
  } else if (cmd->IsUniformTexelBuffer()) {
    desc_type = DescriptorType::kUniformTexelBuffer;
  } else if (cmd->IsStorageTexelBuffer()) {
    desc_type = DescriptorType::kStorageTexelBuffer;
  } else if (cmd->IsSSBO()) {
    desc_type = DescriptorType::kStorageBuffer;
  } else if (cmd->IsUniformDynamic()) {
    desc_type = DescriptorType::kUniformBufferDynamic;
  } else if (cmd->IsSSBODynamic()) {
    desc_type = DescriptorType::kStorageBufferDynamic;
  }

  if (desc == nullptr) {
    if (is_image) {
      auto image_desc = MakeUnique<ImageDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetBaseMipLevel(),
          cmd->GetDescriptorSet(), cmd->GetBinding(), this);
      if (cmd->IsCombinedImageSampler())
        image_desc->SetAmberSampler(cmd->GetSampler());

      descriptors.push_back(std::move(image_desc));
    } else {
      auto buffer_desc = MakeUnique<BufferDescriptor>(
          cmd->GetBuffer(), desc_type, device_, cmd->GetDescriptorSet(),
          cmd->GetBinding(), this);
      descriptors.push_back(std::move(buffer_desc));
    }
    AddDescriptorBuffer(cmd->GetBuffer());
    desc = descriptors.back().get();
  } else {
    if (desc->GetDescriptorType() != desc_type) {
      return Result(
          "Descriptors bound to the same binding need to have matching "
          "descriptor types");
    }
    desc->AsBufferBackedDescriptor()->AddAmberBuffer(cmd->GetBuffer());
    AddDescriptorBuffer(cmd->GetBuffer());
  }

  if (cmd->IsUniformDynamic() || cmd->IsSSBODynamic())
    desc->AsBufferDescriptor()->AddDynamicOffset(cmd->GetDynamicOffset());

  if (cmd->IsUniform() || cmd->IsUniformDynamic() || cmd->IsSSBO() ||
      cmd->IsSSBODynamic()) {
    desc->AsBufferDescriptor()->AddDescriptorOffset(cmd->GetDescriptorOffset());
    desc->AsBufferDescriptor()->AddDescriptorRange(cmd->GetDescriptorRange());
  }

  if (cmd->IsSSBO() && !desc->IsStorageBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for SSBO uses wrong "
        "descriptor set and binding");
  }

  if (cmd->IsUniform() && !desc->IsUniformBuffer()) {
    return Result(
        "Vulkan::AddBufferDescriptor BufferCommand for UBO uses wrong "
        "descriptor set and binding");
  }

  return {};
}

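// Registers a sampler descriptor, mirroring AddBufferDescriptor(): the first
// command for a (set, binding) pair creates the descriptor, and later
// commands append their sampler to it.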
Result Pipeline::AddSamplerDescriptor(const SamplerCommand* cmd) {
  if (cmd == nullptr)
    return Result("Pipeline::AddSamplerDescriptor SamplerCommand is nullptr");

  Descriptor* desc;
  Result r =
      GetDescriptorSlot(cmd->GetDescriptorSet(), cmd->GetBinding(), &desc);
  if (!r.IsSuccess())
    return r;

  auto& descriptors =
      descriptor_set_info_[cmd->GetDescriptorSet()].descriptors;

  if (desc == nullptr) {
    auto sampler_desc = MakeUnique<SamplerDescriptor>(
        cmd->GetSampler(), DescriptorType::kSampler, device_,
        cmd->GetDescriptorSet(), cmd->GetBinding());
    descriptors.push_back(std::move(sampler_desc));
  } else {
    if (desc->GetDescriptorType() != DescriptorType::kSampler) {
      return Result(
          "Descriptors bound to the same binding need to have matching "
          "descriptor types");
    }
    desc->AsSamplerDescriptor()->AddAmberSampler(cmd->GetSampler());
  }

  return {};
}

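// Uploads descriptor data to the device in two submissions: the first
// creates and initializes the transfer buffers and images (and flushes any
// host-visible direct writes, see the comment below), and the second records
// the actual copies, wrapping image copies in layout-transition barriers.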
Result Pipeline::SendDescriptorDataToDeviceIfNeeded() {
  {
    CommandBufferGuard guard(GetCommandBuffer());
    if (!guard.IsRecording())
      return guard.GetResult();

    for (auto& info : descriptor_set_info_) {
      for (auto& desc : info.descriptors) {
        Result r = desc->CreateResourceIfNeeded();
        if (!r.IsSuccess())
          return r;
      }
    }

    // Initialize transfer buffers / images.
    for (auto buffer : descriptor_buffers_) {
      if (descriptor_transfer_resources_.count(buffer) == 0) {
        return Result(
            "Vulkan: Pipeline::SendDescriptorDataToDeviceIfNeeded() "
            "descriptor's transfer resource is not found");
      }
      Result r = descriptor_transfer_resources_[buffer]->Initialize();
      if (!r.IsSuccess())
        return r;
    }

    // Note that if a buffer for a descriptor is host accessible, and
    // therefore does not need a command to copy its data to the device, the
    // data is written directly to the buffer. That direct write must happen
    // after the backing buffer has been resized, i.e., after its data has
    // been copied from the old buffer to the new one. Thus, we must submit
    // the commands here to guarantee that ordering.
    Result r = guard.Submit(GetFenceTimeout());
    if (!r.IsSuccess())
      return r;
  }

  CommandBufferGuard guard(GetCommandBuffer());
  if (!guard.IsRecording())
    return guard.GetResult();

  // Copy descriptor data to transfer resources.
  for (auto& buffer : descriptor_buffers_) {
    if (auto transfer_buffer =
            descriptor_transfer_resources_[buffer]->AsTransferBuffer()) {
      BufferBackedDescriptor::RecordCopyBufferDataToTransferResourceIfNeeded(
          GetCommandBuffer(), buffer, transfer_buffer);
    } else if (auto transfer_image =
                   descriptor_transfer_resources_[buffer]->AsTransferImage()) {
      transfer_image->ImageBarrier(GetCommandBuffer(),
                                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT);

      BufferBackedDescriptor::RecordCopyBufferDataToTransferResourceIfNeeded(
          GetCommandBuffer(), buffer, transfer_image);

      transfer_image->ImageBarrier(GetCommandBuffer(), VK_IMAGE_LAYOUT_GENERAL,
                                   VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
    } else {
      return Result(
          "Vulkan: Pipeline::SendDescriptorDataToDeviceIfNeeded() "
          "this should be unreachable");
    }
  }
  return guard.Submit(GetFenceTimeout());
}

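// Binds every non-empty descriptor set. Dynamic offsets must be passed to
// vkCmdBindDescriptorSets in binding order, so they are gathered per binding
// and sorted first.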
void Pipeline::BindVkDescriptorSets(const VkPipelineLayout& pipeline_layout) {
  for (size_t i = 0; i < descriptor_set_info_.size(); ++i) {
    if (descriptor_set_info_[i].empty)
      continue;

    // Sort descriptors by binding number to get the correct order of dynamic
    // offsets.
    typedef std::pair<uint32_t, std::vector<uint32_t>> binding_offsets_pair;
    std::vector<binding_offsets_pair> binding_offsets;
    for (const auto& desc : descriptor_set_info_[i].descriptors) {
      binding_offsets.push_back(
          {desc->GetBinding(), desc->GetDynamicOffsets()});
    }

    std::sort(
        std::begin(binding_offsets), std::end(binding_offsets),
        [](const binding_offsets_pair& a, const binding_offsets_pair& b) {
          return a.first < b.first;
        });

    // Add the sorted dynamic offsets.
    std::vector<uint32_t> dynamic_offsets;
    for (const auto& binding_offset : binding_offsets) {
      for (auto offset : binding_offset.second) {
        dynamic_offsets.push_back(offset);
      }
    }

    device_->GetPtrs()->vkCmdBindDescriptorSets(
        command_->GetVkCommandBuffer(),
        IsGraphics() ? VK_PIPELINE_BIND_POINT_GRAPHICS
                     : VK_PIPELINE_BIND_POINT_COMPUTE,
        pipeline_layout, static_cast<uint32_t>(i), 1,
        &descriptor_set_info_[i].vk_desc_set,
        static_cast<uint32_t>(dynamic_offsets.size()), dynamic_offsets.data());
  }
}

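// Copies descriptor data back from the device: records transfer-resource to
// host-visible copies (with an image barrier to TRANSFER_SRC_OPTIMAL for
// images), submits them, then moves the data into the Amber output buffers
// and releases the transfer resources.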
Result Pipeline::ReadbackDescriptorsToHostDataQueue() {
  // Record required commands to copy the data to a host visible buffer.
  {
    CommandBufferGuard guard(GetCommandBuffer());
    if (!guard.IsRecording())
      return guard.GetResult();

    for (auto& buffer : descriptor_buffers_) {
      if (descriptor_transfer_resources_.count(buffer) == 0) {
        return Result(
            "Vulkan: Pipeline::ReadbackDescriptorsToHostDataQueue() "
            "descriptor's transfer resource is not found");
      }
      if (auto transfer_buffer =
              descriptor_transfer_resources_[buffer]->AsTransferBuffer()) {
        Result r = BufferBackedDescriptor::RecordCopyTransferResourceToHost(
            GetCommandBuffer(), transfer_buffer);
        if (!r.IsSuccess())
          return r;
      } else if (auto transfer_image = descriptor_transfer_resources_[buffer]
                                           ->AsTransferImage()) {
        transfer_image->ImageBarrier(GetCommandBuffer(),
                                     VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                     VK_PIPELINE_STAGE_TRANSFER_BIT);
        Result r = BufferBackedDescriptor::RecordCopyTransferResourceToHost(
            GetCommandBuffer(), transfer_image);
        if (!r.IsSuccess())
          return r;
      } else {
        return Result(
            "Vulkan: Pipeline::ReadbackDescriptorsToHostDataQueue() "
            "this should be unreachable");
      }
    }

    Result r = guard.Submit(GetFenceTimeout());
    if (!r.IsSuccess())
      return r;
  }

  // Move data from transfer buffers to output buffers.
  for (auto& buffer : descriptor_buffers_) {
    auto& transfer_resource = descriptor_transfer_resources_[buffer];
    Result r = BufferBackedDescriptor::MoveTransferResourceToBufferOutput(
        transfer_resource.get(), buffer);
    if (!r.IsSuccess())
      return r;
  }
  descriptor_transfer_resources_.clear();
  return {};
}

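// Returns the entry point registered for |stage|, falling back to "main" if
// none was set.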
const char* Pipeline::GetEntryPointName(VkShaderStageFlagBits stage) const {
  auto it = entry_points_.find(stage);
  if (it != entry_points_.end())
    return it->second.c_str();

  return kDefaultEntryPointName;
}

}  // namespace vulkan
}  // namespace amber