1 /*
2 * GStreamer Plugins Vulkan
3 * Copyright (C) 2019 Matthew Waters <matthew@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21 #ifdef HAVE_CONFIG_H
22 #include "config.h"
23 #endif
24
25 #include "gstvkfullscreenquad.h"
26
27 /**
28 * SECTION:vkfullscreenquad
29 * @title: GstVulkanFullScreenQuad
30 * @short_description: Vulkan full screen quad
31 * @see_also: #GstVulkanDevice, #GstVulkanImageMemory
32 *
33 * A #GstVulkanFullScreenQuad is a helper object for rendering a single input
34 * image to an output #GstBuffer
35 */
36
37 #define GST_CAT_DEFAULT gst_vulkan_full_screen_quad_debug
38 GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
39
40 /* XXX: privatise this on moving to lib */
/* A single vertex of the full screen quad: a 3D position plus a 2D texture
 * coordinate.  The field layout must match the vertex attribute
 * descriptions configured in create_pipeline() (R32G32B32 position at
 * offset of x, R32G32 texcoord at offset of s). */
struct Vertex
{
  float x, y, z;                /* position in normalised device coordinates */
  float s, t;                   /* texture coordinate */
};
46
struct _GstVulkanFullScreenQuadPrivate
{
  GstBuffer *inbuf;             /* current input buffer (reference held) */
  GstBuffer *outbuf;            /* current output buffer (reference held) */

  GstMemory *vertices;          /* vertex buffer memory */
  GstMemory *indices;           /* index buffer memory */
  gsize n_indices;              /* number of indices stored in @indices */
  GstMemory *uniforms;          /* uniform buffer memory, may be NULL */
  gsize uniform_size;           /* size in bytes of @uniforms */

  GstVulkanHandle *vert;        /* vertex shader module handle */
  GstVulkanHandle *frag;        /* fragment shader module handle */
};
61
/* Register the GObject type, initialise the debug category at type
 * registration time, and attach the private structure. */
G_DEFINE_TYPE_WITH_CODE (GstVulkanFullScreenQuad, gst_vulkan_full_screen_quad,
    GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_vulkan_full_screen_quad_debug,
        "vulkanfullscreenquad", 0, "vulkan fullscreen quad render");
    G_ADD_PRIVATE (GstVulkanFullScreenQuad));

/* Fetch the private structure for @self */
#define GET_PRIV(self) gst_vulkan_full_screen_quad_get_instance_private (self)
68
69 struct Vertex vertices[] = {
70 {-1.0f, -1.0f, 0.0f, 0.0f, 0.0f},
71 {1.0f, -1.0f, 0.0f, 1.0f, 0.0f},
72 {1.0f, 1.0f, 0.0f, 1.0f, 1.0f},
73 {-1.0f, 1.0f, 0.0f, 0.0f, 1.0f},
74 };
75
76 gushort indices[] = {
77 0, 1, 2, 0, 2, 3,
78 };
79
80 static gboolean
create_sampler(GstVulkanFullScreenQuad * self,GError ** error)81 create_sampler (GstVulkanFullScreenQuad * self, GError ** error)
82 {
83 /* *INDENT-OFF* */
84 VkSamplerCreateInfo samplerInfo = {
85 .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
86 .magFilter = VK_FILTER_LINEAR,
87 .minFilter = VK_FILTER_LINEAR,
88 .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
89 .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
90 .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
91 .anisotropyEnable = VK_FALSE,
92 .maxAnisotropy = 1,
93 .borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
94 .unnormalizedCoordinates = VK_FALSE,
95 .compareEnable = VK_FALSE,
96 .compareOp = VK_COMPARE_OP_ALWAYS,
97 .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
98 .mipLodBias = 0.0f,
99 .minLod = 0.0f,
100 .maxLod = 0.0f
101 };
102 /* *INDENT-ON* */
103 VkSampler sampler;
104 VkResult err;
105
106 err =
107 vkCreateSampler (self->queue->device->device, &samplerInfo, NULL,
108 &sampler);
109 if (gst_vulkan_error_to_g_error (err, error, "vkCreateSampler") < 0) {
110 return FALSE;
111 }
112
113 self->sampler = gst_vulkan_handle_new_wrapped (self->queue->device,
114 GST_VULKAN_HANDLE_TYPE_SAMPLER, (GstVulkanHandleTypedef) sampler,
115 gst_vulkan_handle_free_sampler, NULL);
116
117 return TRUE;
118 }
119
/* Acquire a descriptor set from the cache and point its bindings at the
 * current resources: binding 0 gets the uniform buffer (when set), bindings
 * 1..N get the per-plane input image @views.  Creates the sampler lazily on
 * first use.
 *
 * Returns: (transfer full): the updated descriptor set, or NULL with @error
 * filled on failure. */
static GstVulkanDescriptorSet *
get_and_update_descriptor_set (GstVulkanFullScreenQuad * self,
    GstVulkanImageView ** views, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanDescriptorSet *set;

  if (!self->sampler)
    if (!create_sampler (self, error))
      return NULL;

  if (!(set =
          gst_vulkan_descriptor_cache_acquire (self->descriptor_cache, error)))
    return NULL;

  {
    /* +1 slot for the optional uniform-buffer write at binding 0 */
    VkWriteDescriptorSet writes[GST_VIDEO_MAX_PLANES + 1];
    /* image_info/buffer_info must stay alive until vkUpdateDescriptorSets
     * below: the writes array holds pointers into them */
    VkDescriptorImageInfo image_info[GST_VIDEO_MAX_PLANES];
    VkDescriptorBufferInfo buffer_info;
    int write_n = 0;
    int i;

    /* *INDENT-OFF* */
    if (priv->uniforms) {
      /* binding 0: the uniform buffer shared by vertex and fragment stages */
      buffer_info = (VkDescriptorBufferInfo) {
          .buffer = ((GstVulkanBufferMemory *) priv->uniforms)->buffer,
          .offset = 0,
          .range = priv->uniform_size
      };

      writes[write_n++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .pNext = NULL,
          .dstSet = set->set,
          .dstBinding = 0,
          .dstArrayElement = 0,
          .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
          .descriptorCount = 1,
          .pBufferInfo = &buffer_info
      };
    }

    /* bindings 1..N: one combined image/sampler per input video plane */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      image_info[i] = (VkDescriptorImageInfo) {
          .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
          .imageView = views[i]->view,
          .sampler = (VkSampler) self->sampler->handle
      };

      writes[write_n++] = (VkWriteDescriptorSet) {
          .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
          .pNext = NULL,
          .dstSet = set->set,
          .dstBinding = i + 1,
          .dstArrayElement = 0,
          .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
          .descriptorCount = 1,
          .pImageInfo = &image_info[i]
      };
    }
    /* *INDENT-ON* */
    vkUpdateDescriptorSets (self->queue->device->device, write_n, writes, 0,
        NULL);
  }

  return set;
}
187
188 static gboolean
create_descriptor_set_layout(GstVulkanFullScreenQuad * self,GError ** error)189 create_descriptor_set_layout (GstVulkanFullScreenQuad * self, GError ** error)
190 {
191 VkDescriptorSetLayoutBinding bindings[GST_VIDEO_MAX_PLANES + 1] = { {0,} };
192 VkDescriptorSetLayoutCreateInfo layout_info;
193 VkDescriptorSetLayout descriptor_set_layout;
194 int descriptor_n = 0;
195 VkResult err;
196 int i;
197
198 /* *INDENT-OFF* */
199 bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
200 .binding = 0,
201 .descriptorCount = 1,
202 .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
203 .pImmutableSamplers = NULL,
204 .stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT
205 };
206 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
207 bindings[descriptor_n++] = (VkDescriptorSetLayoutBinding) {
208 .binding = i+1,
209 .descriptorCount = 1,
210 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
211 .pImmutableSamplers = NULL,
212 .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
213 };
214 };
215
216 layout_info = (VkDescriptorSetLayoutCreateInfo) {
217 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
218 .pNext = NULL,
219 .bindingCount = descriptor_n,
220 .pBindings = bindings
221 };
222 /* *INDENT-ON* */
223
224 err =
225 vkCreateDescriptorSetLayout (self->queue->device->device, &layout_info,
226 NULL, &descriptor_set_layout);
227 if (gst_vulkan_error_to_g_error (err, error,
228 "vkCreateDescriptorSetLayout") < 0) {
229 return FALSE;
230 }
231
232 self->descriptor_set_layout =
233 gst_vulkan_handle_new_wrapped (self->queue->device,
234 GST_VULKAN_HANDLE_TYPE_DESCRIPTOR_SET_LAYOUT,
235 (GstVulkanHandleTypedef) descriptor_set_layout,
236 gst_vulkan_handle_free_descriptor_set_layout, NULL);
237
238 return TRUE;
239 }
240
241 static gboolean
create_pipeline_layout(GstVulkanFullScreenQuad * self,GError ** error)242 create_pipeline_layout (GstVulkanFullScreenQuad * self, GError ** error)
243 {
244 VkPipelineLayoutCreateInfo pipeline_layout_info;
245 VkPipelineLayout pipeline_layout;
246 VkResult err;
247
248 if (!self->descriptor_set_layout)
249 if (!create_descriptor_set_layout (self, error))
250 return FALSE;
251
252 /* *INDENT-OFF* */
253 pipeline_layout_info = (VkPipelineLayoutCreateInfo) {
254 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
255 .pNext = NULL,
256 .setLayoutCount = 1,
257 .pSetLayouts = (VkDescriptorSetLayout *) &self->descriptor_set_layout->handle,
258 .pushConstantRangeCount = 0,
259 .pPushConstantRanges = NULL,
260 };
261 /* *INDENT-ON* */
262
263 err =
264 vkCreatePipelineLayout (self->queue->device->device,
265 &pipeline_layout_info, NULL, &pipeline_layout);
266 if (gst_vulkan_error_to_g_error (err, error, "vkCreatePipelineLayout") < 0) {
267 return FALSE;
268 }
269
270 self->pipeline_layout = gst_vulkan_handle_new_wrapped (self->queue->device,
271 GST_VULKAN_HANDLE_TYPE_PIPELINE_LAYOUT,
272 (GstVulkanHandleTypedef) pipeline_layout,
273 gst_vulkan_handle_free_pipeline_layout, NULL);
274
275 return TRUE;
276 }
277
/* Create a single-subpass render pass with one colour attachment per output
 * video plane and wrap it into self->render_pass.
 *
 * Returns: FALSE and fills @error on failure, TRUE otherwise. */
static gboolean
create_render_pass (GstVulkanFullScreenQuad * self, GError ** error)
{
  VkAttachmentDescription color_attachments[GST_VIDEO_MAX_PLANES];
  VkAttachmentReference color_attachment_refs[GST_VIDEO_MAX_PLANES];
  VkRenderPassCreateInfo render_pass_info;
  VkSubpassDescription subpass;
  VkRenderPass render_pass;
  VkResult err;
  int i;

  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    /* *INDENT-OFF* */
    color_attachments[i] = (VkAttachmentDescription) {
        /* attachment format follows the per-plane Vulkan format of the
         * output video info */
        .format = gst_vulkan_format_from_video_info (&self->out_info, i),
        .samples = VK_SAMPLE_COUNT_1_BIT,
        /* previous contents are discarded (cleared) on load, results kept */
        .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
        .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
        .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
        .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
        /* FIXME: share this between elements to avoid pipeline barriers */
        .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
    };

    color_attachment_refs[i] = (VkAttachmentReference) {
        .attachment = i,
        .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
    };
    /* *INDENT-ON* */
  }

  /* *INDENT-OFF* */
  subpass = (VkSubpassDescription) {
      .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
      .colorAttachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pColorAttachments = color_attachment_refs
  };

  render_pass_info = (VkRenderPassCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
      .pNext = NULL,
      .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
      .pAttachments = color_attachments,
      .subpassCount = 1,
      .pSubpasses = &subpass
  };
  /* *INDENT-ON* */

  err =
      vkCreateRenderPass (self->queue->device->device, &render_pass_info, NULL,
      &render_pass);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateRenderPass") < 0) {
    return FALSE;
  }

  self->render_pass = gst_vulkan_handle_new_wrapped (self->queue->device,
      GST_VULKAN_HANDLE_TYPE_RENDER_PASS,
      (GstVulkanHandleTypedef) render_pass,
      gst_vulkan_handle_free_render_pass, NULL);

  return TRUE;
}
341
342 static gboolean
create_pipeline(GstVulkanFullScreenQuad * self,GError ** error)343 create_pipeline (GstVulkanFullScreenQuad * self, GError ** error)
344 {
345 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
346 VkVertexInputBindingDescription vertex_binding;
347 VkVertexInputAttributeDescription attribute_descriptions[2];
348 VkPipelineShaderStageCreateInfo shader_create_info[2];
349 VkPipelineVertexInputStateCreateInfo vertex_input_info;
350 VkPipelineInputAssemblyStateCreateInfo input_assembly;
351 VkPipelineViewportStateCreateInfo viewport_state;
352 VkPipelineRasterizationStateCreateInfo rasterizer;
353 VkPipelineMultisampleStateCreateInfo multisampling;
354 VkPipelineColorBlendAttachmentState
355 color_blend_attachments[GST_VIDEO_MAX_PLANES];
356 VkPipelineColorBlendStateCreateInfo color_blending;
357 VkGraphicsPipelineCreateInfo pipeline_create_info;
358 VkPipeline pipeline;
359 VkResult err;
360
361 if (!priv->vert || !priv->frag) {
362 g_set_error_literal (error, GST_VULKAN_ERROR,
363 VK_ERROR_INITIALIZATION_FAILED, "Missing shader information");
364 return FALSE;
365 }
366
367 if (!self->pipeline_layout)
368 if (!create_pipeline_layout (self, error))
369 return FALSE;
370
371 if (!self->render_pass)
372 if (!create_render_pass (self, error))
373 return FALSE;
374
375 /* *INDENT-OFF* */
376 shader_create_info[0] = (VkPipelineShaderStageCreateInfo) {
377 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
378 .pNext = NULL,
379 .stage = VK_SHADER_STAGE_VERTEX_BIT,
380 .module = (VkShaderModule) priv->vert->handle,
381 .pName = "main"
382 };
383
384 shader_create_info[1] = (VkPipelineShaderStageCreateInfo) {
385 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
386 .pNext = NULL,
387 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
388 .module = (VkShaderModule) priv->frag->handle,
389 .pName = "main"
390 };
391
392 /* *INDENT-OFF* */
393 vertex_binding = (VkVertexInputBindingDescription) {
394 .binding = 0,
395 .stride = sizeof (struct Vertex),
396 .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
397 };
398
399 attribute_descriptions[0] = (VkVertexInputAttributeDescription) {
400 .binding = 0,
401 .location = 0,
402 .format = VK_FORMAT_R32G32B32_SFLOAT,
403 .offset = G_STRUCT_OFFSET (struct Vertex, x)
404 };
405 attribute_descriptions[1] = (VkVertexInputAttributeDescription) {
406 .binding = 0,
407 .location = 1,
408 .format = VK_FORMAT_R32G32_SFLOAT,
409 .offset = G_STRUCT_OFFSET (struct Vertex, s)
410 };
411
412 vertex_input_info = (VkPipelineVertexInputStateCreateInfo) {
413 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
414 .pNext = NULL,
415 .vertexBindingDescriptionCount = 1,
416 .pVertexBindingDescriptions = &vertex_binding,
417 .vertexAttributeDescriptionCount = 2,
418 .pVertexAttributeDescriptions = attribute_descriptions,
419 };
420
421 input_assembly = (VkPipelineInputAssemblyStateCreateInfo) {
422 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
423 .pNext = NULL,
424 .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
425 .primitiveRestartEnable = VK_FALSE
426 };
427
428 viewport_state = (VkPipelineViewportStateCreateInfo) {
429 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
430 .pNext = NULL,
431 .viewportCount = 1,
432 .pViewports = &(VkViewport) {
433 .x = 0.0f,
434 .y = 0.0f,
435 .width = (float) GST_VIDEO_INFO_WIDTH (&self->out_info),
436 .height = (float) GST_VIDEO_INFO_HEIGHT (&self->out_info),
437 .minDepth = 0.0f,
438 .maxDepth = 1.0f
439 },
440 .scissorCount = 1,
441 .pScissors = &(VkRect2D) {
442 .offset = { 0, 0 },
443 .extent = {
444 GST_VIDEO_INFO_WIDTH (&self->out_info),
445 GST_VIDEO_INFO_HEIGHT (&self->out_info)
446 }
447 }
448 };
449
450 rasterizer = (VkPipelineRasterizationStateCreateInfo) {
451 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
452 .pNext = NULL,
453 .depthClampEnable = VK_FALSE,
454 .rasterizerDiscardEnable = VK_FALSE,
455 .polygonMode = VK_POLYGON_MODE_FILL,
456 .lineWidth = 1.0f,
457 .cullMode = VK_CULL_MODE_NONE,
458 .frontFace = VK_FRONT_FACE_CLOCKWISE,
459 .depthBiasEnable = VK_FALSE
460 };
461
462 multisampling = (VkPipelineMultisampleStateCreateInfo) {
463 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
464 .pNext = NULL,
465 .sampleShadingEnable = VK_FALSE,
466 .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT
467 };
468
469 color_blend_attachments[0] = (VkPipelineColorBlendAttachmentState) {
470 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
471 .blendEnable = VK_FALSE
472 };
473 color_blend_attachments[1] = (VkPipelineColorBlendAttachmentState) {
474 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
475 .blendEnable = VK_FALSE
476 };
477 color_blend_attachments[2] = (VkPipelineColorBlendAttachmentState) {
478 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
479 .blendEnable = VK_FALSE
480 };
481 color_blend_attachments[3] = (VkPipelineColorBlendAttachmentState) {
482 .colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
483 .blendEnable = VK_FALSE
484 };
485
486 color_blending = (VkPipelineColorBlendStateCreateInfo) {
487 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
488 .pNext = NULL,
489 .logicOpEnable = VK_FALSE,
490 .logicOp = VK_LOGIC_OP_COPY,
491 .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
492 .pAttachments = color_blend_attachments,
493 .blendConstants = { 0.0f, 0.0f, 0.0f, 0.0f }
494 };
495
496 pipeline_create_info = (VkGraphicsPipelineCreateInfo) {
497 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
498 .pNext = NULL,
499 .stageCount = 2,
500 .pStages = shader_create_info,
501 .pVertexInputState = &vertex_input_info,
502 .pInputAssemblyState = &input_assembly,
503 .pViewportState = &viewport_state,
504 .pRasterizationState = &rasterizer,
505 .pMultisampleState = &multisampling,
506 .pColorBlendState = &color_blending,
507 .layout = (VkPipelineLayout) self->pipeline_layout->handle,
508 .renderPass = (VkRenderPass) self->render_pass->handle,
509 .subpass = 0,
510 .basePipelineHandle = VK_NULL_HANDLE
511 };
512 /* *INDENT-ON* */
513
514 err =
515 vkCreateGraphicsPipelines (self->queue->device->device, VK_NULL_HANDLE, 1,
516 &pipeline_create_info, NULL, &pipeline);
517 if (gst_vulkan_error_to_g_error (err, error, "vkCreateGraphicsPipelines") < 0) {
518 return FALSE;
519 }
520
521 self->graphics_pipeline = gst_vulkan_handle_new_wrapped (self->queue->device,
522 GST_VULKAN_HANDLE_TYPE_PIPELINE, (GstVulkanHandleTypedef) pipeline,
523 gst_vulkan_handle_free_pipeline, NULL);
524
525 return TRUE;
526 }
527
/* Create the descriptor pool (and the cache wrapping it) that descriptor
 * sets are allocated from: combined image/samplers for each input plane,
 * plus uniform buffers when a uniform buffer has been set.
 *
 * Returns: FALSE and fills @error on failure, TRUE otherwise. */
static gboolean
create_descriptor_pool (GstVulkanFullScreenQuad * self, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  VkDescriptorPoolCreateInfo pool_info;
  gsize max_sets = 32;          /* FIXME: don't hardcode this! */
  guint n_pools = 1;            /* number of entries used in pool_sizes[] */
  VkDescriptorPoolSize pool_sizes[2];
  VkDescriptorPool pool;
  GstVulkanDescriptorPool *ret;
  VkResult err;

  /* *INDENT-OFF* */
  pool_sizes[0] = (VkDescriptorPoolSize) {
      .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
      .descriptorCount = max_sets * GST_VIDEO_INFO_N_PLANES (&self->in_info),
  };

  /* NOTE(review): the uniform-buffer pool size is only added when uniforms
   * are already set, yet create_descriptor_set_layout() always declares a
   * uniform-buffer binding 0 — confirm allocation still succeeds when
   * uniforms are set after the pool is created */
  if (priv->uniforms) {
    pool_sizes[1] = (VkDescriptorPoolSize) {
        .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        .descriptorCount = max_sets
    };
    n_pools++;
  }

  pool_info = (VkDescriptorPoolCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
      .pNext = NULL,
      /* individual sets may be freed back to the pool */
      .flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
      .poolSizeCount = n_pools,
      .pPoolSizes = pool_sizes,
      .maxSets = max_sets
  };
  /* *INDENT-ON* */

  err =
      vkCreateDescriptorPool (self->queue->device->device, &pool_info, NULL,
      &pool);
  if (gst_vulkan_error_to_g_error (err, error, "vkCreateDescriptorPool") < 0) {
    return FALSE;
  }

  /* the cache takes its own reference to the wrapped pool */
  ret =
      gst_vulkan_descriptor_pool_new_wrapped (self->queue->device, pool,
      max_sets);
  self->descriptor_cache =
      gst_vulkan_descriptor_cache_new (ret, 1, &self->descriptor_set_layout);
  gst_object_unref (ret);

  return TRUE;
}
580
581 static gboolean
create_framebuffer(GstVulkanFullScreenQuad * self,GstVulkanImageView ** views,GError ** error)582 create_framebuffer (GstVulkanFullScreenQuad * self, GstVulkanImageView ** views,
583 GError ** error)
584 {
585 VkImageView attachments[GST_VIDEO_MAX_PLANES] = { 0, };
586 VkFramebufferCreateInfo framebuffer_info;
587 VkFramebuffer framebuffer;
588 VkResult err;
589 int i;
590
591 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
592 attachments[i] = views[i]->view;
593 }
594
595 /* *INDENT-OFF* */
596 framebuffer_info = (VkFramebufferCreateInfo) {
597 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
598 .pNext = NULL,
599 .renderPass = (VkRenderPass) self->render_pass->handle,
600 .attachmentCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
601 .pAttachments = attachments,
602 .width = GST_VIDEO_INFO_WIDTH (&self->out_info),
603 .height = GST_VIDEO_INFO_HEIGHT (&self->out_info),
604 .layers = 1
605 };
606 /* *INDENT-ON* */
607
608 err =
609 vkCreateFramebuffer (self->queue->device->device, &framebuffer_info, NULL,
610 &framebuffer);
611 if (gst_vulkan_error_to_g_error (err, error, "vkCreateFramebuffer") < 0) {
612 return FALSE;
613 }
614
615 self->framebuffer = gst_vulkan_handle_new_wrapped (self->queue->device,
616 GST_VULKAN_HANDLE_TYPE_FRAMEBUFFER, (GstVulkanHandleTypedef) framebuffer,
617 gst_vulkan_handle_free_framebuffer, NULL);
618
619 return TRUE;
620 }
621
/* Evaluates to a new reference to @self's last submitted fence, or, when no
 * work has been submitted yet, to a newly created always-signalled fence for
 * @device.  The caller owns the returned reference.
 *
 * The argument and the whole expansion are parenthesised so the macro can be
 * used safely inside larger expressions (the bare conditional expression
 * previously could bind unexpectedly at the use site). */
#define LAST_FENCE_OR_ALWAYS_SIGNALLED(self,device) \
  ((self)->last_fence ? gst_vulkan_fence_ref ((self)->last_fence) : \
      gst_vulkan_fence_new_always_signalled (device))
624
625 GstVulkanFence *
gst_vulkan_full_screen_quad_get_last_fence(GstVulkanFullScreenQuad * self)626 gst_vulkan_full_screen_quad_get_last_fence (GstVulkanFullScreenQuad * self)
627 {
628 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), NULL);
629
630 return LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);
631 }
632
/* Defines a static clear_<field>() helper that detaches self-><field> and
 * hands the old value to the trash list, so it is only released once the
 * most recently submitted GPU work (tracked via self->last_fence) has
 * completed. */
#define clear_field(field,type,trash_free_func) \
static void \
G_PASTE(clear_,field) (GstVulkanFullScreenQuad * self) \
{ \
  GstVulkanFence *last_fence = \
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device); \
 \
  if (self->field) \
    gst_vulkan_trash_list_add (self->trash_list, \
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence, \
            trash_free_func, (type) self->field)); \
  self->field = NULL; \
 \
  gst_vulkan_fence_unref (last_fence); \
}

/* Convenience wrappers for GstMiniObject- and GstObject-derived fields */
#define clear_field_mini_object(field) clear_field (field,GstMiniObject *,gst_vulkan_trash_mini_object_unref);
#define clear_field_object(field) clear_field (field,GstObject *,gst_vulkan_trash_object_unref);

/* Instantiate clear_descriptor_set(), clear_framebuffer(), clear_sampler(),
 * clear_pipeline_layout(), clear_graphics_pipeline(),
 * clear_descriptor_set_layout(), clear_cmd_pool(), clear_descriptor_cache() */
clear_field_mini_object (descriptor_set);
clear_field_mini_object (framebuffer);
clear_field_mini_object (sampler);
clear_field_mini_object (pipeline_layout);
clear_field_mini_object (graphics_pipeline);
clear_field_mini_object (descriptor_set_layout);
clear_field_object (cmd_pool);
clear_field_object (descriptor_cache);
660
/* Drop the vertex and fragment shader module references, deferring their
 * destruction until the last submitted GPU work has completed. */
static void
clear_shaders (GstVulkanFullScreenQuad * self)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (priv->vert)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->vert));
  priv->vert = NULL;

  if (priv->frag)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref, (GstMiniObject *) priv->frag));
  priv->frag = NULL;

  gst_vulkan_fence_unref (last_fence);
}

/* Drop the uniform buffer memory (deferred) and reset its recorded size. */
static void
clear_uniform_data (GstVulkanFullScreenQuad * self)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (priv->uniforms)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) priv->uniforms));
  priv->uniforms = NULL;
  priv->uniform_size = 0;

  gst_vulkan_fence_unref (last_fence);
}

/* Drop the index buffer memory (deferred) and reset the index count. */
static void
clear_index_data (GstVulkanFullScreenQuad * self)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (priv->indices)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) priv->indices));
  priv->indices = NULL;
  priv->n_indices = 0;

  gst_vulkan_fence_unref (last_fence);
}

/* Drop the vertex buffer memory (deferred). */
static void
clear_vertex_data (GstVulkanFullScreenQuad * self)
{
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (priv->vertices)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) priv->vertices));
  priv->vertices = NULL;

  gst_vulkan_fence_unref (last_fence);
}

/* Drop the render pass handle (deferred). */
static void
clear_render_pass (GstVulkanFullScreenQuad * self)
{
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  if (self->render_pass)
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, last_fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) self->render_pass));
  self->render_pass = NULL;

  gst_vulkan_fence_unref (last_fence);
}
751
/* Queue every pipeline-related object (render pass, layouts, pipeline) for
 * deferred destruction, then garbage-collect anything already safe to free. */
static void
destroy_pipeline (GstVulkanFullScreenQuad * self)
{
  /* NOTE(review): this reference is taken and released without being used
   * here; each clear_*() below acquires its own — confirm the extra ref is
   * intentional */
  GstVulkanFence *last_fence =
      LAST_FENCE_OR_ALWAYS_SIGNALLED (self, self->queue->device);

  clear_render_pass (self);
  clear_pipeline_layout (self);
  clear_graphics_pipeline (self);
  clear_descriptor_set_layout (self);

  gst_vulkan_fence_unref (last_fence);

  /* free whatever the GPU has already finished with */
  gst_vulkan_trash_list_gc (self->trash_list);
}
767
768 void
gst_vulkan_full_screen_quad_init(GstVulkanFullScreenQuad * self)769 gst_vulkan_full_screen_quad_init (GstVulkanFullScreenQuad * self)
770 {
771 self->trash_list = gst_vulkan_trash_fence_list_new ();
772 }
773
774 /**
775 * gst_vulkan_full_screen_quad_new:
776 * @queue: a #GstVulkanQueue
777 *
778 * Returns: (transfer full): a new #GstVulkanFullScreenQuad
779 *
780 * Since: 1.18
781 */
GstVulkanFullScreenQuad *
gst_vulkan_full_screen_quad_new (GstVulkanQueue * queue)
{
  GstVulkanFullScreenQuad *self;

  g_return_val_if_fail (GST_IS_VULKAN_QUEUE (queue), NULL);

  self = g_object_new (GST_TYPE_VULKAN_FULL_SCREEN_QUAD, NULL);
  self->queue = gst_object_ref (queue);

  /* GstObject instances start with a floating reference; sink it so the
   * caller receives a full (transfer full) reference. */
  gst_object_ref_sink (self);

  return self;
}
796
/* GObject finalize: queue every Vulkan resource for deferred destruction,
 * wait for all outstanding GPU work, then drop the remaining references. */
static void
gst_vulkan_full_screen_quad_finalize (GObject * object)
{
  GstVulkanFullScreenQuad *self = GST_VULKAN_FULL_SCREEN_QUAD (object);
  GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);

  /* hand everything to the trash list */
  destroy_pipeline (self);
  clear_cmd_pool (self);
  clear_sampler (self);
  clear_framebuffer (self);
  clear_descriptor_set (self);
  clear_descriptor_cache (self);
  clear_shaders (self);
  clear_uniform_data (self);
  clear_index_data (self);
  clear_vertex_data (self);

  /* block until the GPU is done with all of it, then actually free */
  gst_vulkan_trash_list_wait (self->trash_list, -1);
  gst_vulkan_trash_list_gc (self->trash_list);
  gst_clear_object (&self->trash_list);

  gst_clear_mini_object (((GstMiniObject **) & self->last_fence));

  gst_clear_object (&self->queue);

  gst_clear_buffer (&priv->inbuf);
  gst_clear_buffer (&priv->outbuf);

  G_OBJECT_CLASS (gst_vulkan_full_screen_quad_parent_class)->finalize (object);
}
827
/* GObject class init: install the finalize handler. */
static void
gst_vulkan_full_screen_quad_class_init (GstVulkanFullScreenQuadClass * klass)
{
  GObjectClass *obj_class = G_OBJECT_CLASS (klass);

  obj_class->finalize = gst_vulkan_full_screen_quad_finalize;
}
835
836 /**
837 * gst_vulkan_full_screen_quad_set_info:
838 * @self: the #GstVulkanFullScreenQuad
839 * @in_info: the input #GstVideoInfo to set
840 * @out_info: the output #GstVideoInfo to set
841 *
842 * Returns: whether the information could be successfully set
843 *
844 * Since: 1.18
845 */
846 gboolean
gst_vulkan_full_screen_quad_set_info(GstVulkanFullScreenQuad * self,GstVideoInfo * in_info,GstVideoInfo * out_info)847 gst_vulkan_full_screen_quad_set_info (GstVulkanFullScreenQuad * self,
848 GstVideoInfo * in_info, GstVideoInfo * out_info)
849 {
850 self->out_info = *out_info;
851 self->in_info = *in_info;
852
853 destroy_pipeline (self);
854 clear_framebuffer (self);
855 clear_descriptor_set (self);
856 clear_descriptor_cache (self);
857 clear_uniform_data (self);
858
859 return TRUE;
860 }
861
862 /**
863 * gst_vulkan_full_screen_quad_set_input_buffer:
864 * @self: the #GstVulkanFullScreenQuad
865 * @buffer: the input #GstBuffer to set
866 * @error: #GError to fill on failure
867 *
868 * Returns: whether the input buffer could be changed
869 *
870 * Since: 1.18
871 */
872 gboolean
gst_vulkan_full_screen_quad_set_input_buffer(GstVulkanFullScreenQuad * self,GstBuffer * buffer,GError ** error)873 gst_vulkan_full_screen_quad_set_input_buffer (GstVulkanFullScreenQuad * self,
874 GstBuffer * buffer, GError ** error)
875 {
876 GstVulkanFullScreenQuadPrivate *priv;
877
878 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
879
880 priv = GET_PRIV (self);
881
882 gst_buffer_replace (&priv->inbuf, buffer);
883 clear_descriptor_set (self);
884 return TRUE;
885 }
886
887 /**
888 * gst_vulkan_full_screen_quad_set_output_buffer:
889 * @self: the #GstVulkanFullScreenQuad
890 * @buffer: the output #GstBuffer to set
891 * @error: #GError to fill on failure
892 *
 * Returns: whether the output buffer could be changed
894 *
895 * Since: 1.18
896 */
gboolean
gst_vulkan_full_screen_quad_set_output_buffer (GstVulkanFullScreenQuad * self,
    GstBuffer * buffer, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  priv = GET_PRIV (self);

  /* replace the output buffer and invalidate the framebuffer, which is
   * bound to the previous buffer's image views */
  gst_buffer_replace (&priv->outbuf, buffer);
  clear_framebuffer (self);
  return TRUE;
}
911
912 /**
913 * gst_vulkan_full_screen_quad_set_shaders:
914 * @self: the #GstVulkanFullScreenQuad
915 * @vert: the vertex shader to set
916 * @frag: the fragment shader to set
917 *
918 * Returns: whether the shaders could be set
919 *
920 * Since: 1.18
921 */
gboolean
gst_vulkan_full_screen_quad_set_shaders (GstVulkanFullScreenQuad * self,
    GstVulkanHandle * vert, GstVulkanHandle * frag)
{
  GstVulkanFullScreenQuadPrivate *priv;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (vert != NULL, FALSE);
  g_return_val_if_fail (vert->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);
  g_return_val_if_fail (frag != NULL, FALSE);
  g_return_val_if_fail (frag->type == GST_VULKAN_HANDLE_TYPE_SHADER, FALSE);

  priv = GET_PRIV (self);

  /* the pipeline bakes in the shader modules, so it must be rebuilt */
  clear_shaders (self);
  destroy_pipeline (self);

  priv->vert = gst_vulkan_handle_ref (vert);
  priv->frag = gst_vulkan_handle_ref (frag);

  return TRUE;
}
944
945 /**
946 * gst_vulkan_full_screen_quad_set_uniform_buffer:
947 * @self: the #GstVulkanFullScreenQuad
948 * @uniforms: the uniform data to set. Must be a #GstVulkanBufferMemory
949 * @error: a #GError to fill on failure
950 *
 * Returns: whether the uniform buffer could be set
952 *
953 * Since: 1.18
954 */
955 gboolean
gst_vulkan_full_screen_quad_set_uniform_buffer(GstVulkanFullScreenQuad * self,GstMemory * uniforms,GError ** error)956 gst_vulkan_full_screen_quad_set_uniform_buffer (GstVulkanFullScreenQuad * self,
957 GstMemory * uniforms, GError ** error)
958 {
959 GstVulkanFullScreenQuadPrivate *priv;
960
961 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
962 g_return_val_if_fail (uniforms == NULL
963 || gst_is_vulkan_buffer_memory (uniforms), FALSE);
964
965 priv = GET_PRIV (self);
966
967 clear_uniform_data (self);
968 if (uniforms) {
969 priv->uniforms = gst_memory_ref (uniforms);
970 priv->uniform_size = gst_memory_get_sizes (uniforms, NULL, NULL);
971 }
972
973 return TRUE;
974 }
975
976 /**
977 * gst_vulkan_full_screen_quad_set_index_buffer:
978 * @self: the #GstVulkanFullScreenQuad
979 * @indices: the index data. Must be a #GstVulkanBufferMemory
980 * @n_indices: number of indices in @indices
981 * @error: #GError to fill on failure
982 *
983 * See also gst_vulkan_full_screen_quad_set_vertex_buffer()
984 *
985 * Returns: whether the index data could be set
986 *
987 * Since: 1.18
988 */
989 gboolean
gst_vulkan_full_screen_quad_set_index_buffer(GstVulkanFullScreenQuad * self,GstMemory * indices,gsize n_indices,GError ** error)990 gst_vulkan_full_screen_quad_set_index_buffer (GstVulkanFullScreenQuad * self,
991 GstMemory * indices, gsize n_indices, GError ** error)
992 {
993 GstVulkanFullScreenQuadPrivate *priv;
994
995 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
996 g_return_val_if_fail (indices == NULL
997 || gst_is_vulkan_buffer_memory (indices), FALSE);
998
999 priv = GET_PRIV (self);
1000
1001 clear_index_data (self);
1002 if (indices) {
1003 priv->indices = gst_memory_ref (indices);
1004 priv->n_indices = n_indices;
1005 }
1006
1007 return TRUE;
1008 }
1009
1010 /**
1011 * gst_vulkan_full_screen_quad_set_vertex_buffer:
1012 * @self: the #GstVulkanFullScreenQuad
1013 * @vertices: the vertex data. Must be a #GstVulkanBufferMemory
1014 * @error: #GError to fill on failure
1015 *
 * Returns: whether the vertex data could be set
1017 *
1018 * Since: 1.18
1019 */
1020 gboolean
gst_vulkan_full_screen_quad_set_vertex_buffer(GstVulkanFullScreenQuad * self,GstMemory * vertices,GError ** error)1021 gst_vulkan_full_screen_quad_set_vertex_buffer (GstVulkanFullScreenQuad * self,
1022 GstMemory * vertices, GError ** error)
1023 {
1024 GstVulkanFullScreenQuadPrivate *priv;
1025
1026 g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
1027 g_return_val_if_fail (vertices == NULL
1028 || gst_is_vulkan_buffer_memory (vertices), FALSE);
1029
1030 priv = GET_PRIV (self);
1031
1032 clear_vertex_data (self);
1033 if (vertices) {
1034 priv->vertices = gst_memory_ref (vertices);
1035 }
1036
1037 return TRUE;
1038 }
1039
1040 static GstVulkanImageMemory *
peek_image_from_buffer(GstBuffer * buffer,guint i)1041 peek_image_from_buffer (GstBuffer * buffer, guint i)
1042 {
1043 GstMemory *mem = gst_buffer_peek_memory (buffer, i);
1044 g_return_val_if_fail (gst_is_vulkan_image_memory (mem), NULL);
1045 return (GstVulkanImageMemory *) mem;
1046 }
1047
1048 static gboolean
ensure_vertex_data(GstVulkanFullScreenQuad * self,GError ** error)1049 ensure_vertex_data (GstVulkanFullScreenQuad * self, GError ** error)
1050 {
1051 GstVulkanFullScreenQuadPrivate *priv = GET_PRIV (self);
1052 GstMapInfo map_info;
1053
1054 if (!priv->vertices) {
1055 priv->vertices = gst_vulkan_buffer_memory_alloc (self->queue->device,
1056 sizeof (vertices), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
1057 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1058 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1059
1060 if (!gst_memory_map (priv->vertices, &map_info, GST_MAP_WRITE)) {
1061 g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
1062 "Failed to map memory");
1063 goto failure;
1064 }
1065
1066 memcpy (map_info.data, vertices, map_info.size);
1067 gst_memory_unmap (priv->vertices, &map_info);
1068 }
1069
1070 if (!priv->indices) {
1071 priv->indices = gst_vulkan_buffer_memory_alloc (self->queue->device,
1072 sizeof (indices), VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
1073 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1074 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
1075
1076 if (!gst_memory_map (priv->indices, &map_info, GST_MAP_WRITE)) {
1077 g_set_error_literal (error, GST_VULKAN_ERROR, VK_ERROR_MEMORY_MAP_FAILED,
1078 "Failed to map memory");
1079 goto failure;
1080 }
1081
1082 memcpy (map_info.data, indices, map_info.size);
1083 gst_memory_unmap (priv->indices, &map_info);
1084
1085 priv->n_indices = G_N_ELEMENTS (indices);
1086 }
1087
1088 return TRUE;
1089
1090 failure:
1091 if (priv->vertices)
1092 gst_memory_unref (priv->vertices);
1093 priv->vertices = NULL;
1094 if (priv->indices)
1095 gst_memory_unref (priv->indices);
1096 priv->indices = NULL;
1097 priv->n_indices = 0;
1098 return FALSE;
1099 }
1100
1101 /**
1102 * gst_vulkan_full_screen_quad_draw:
1103 * @self: the #GstVulkanFullScreenQuad
1104 * @error: a #GError filled on error
1105 *
1106 * Helper function for creation and submission of a command buffer that draws
1107 * a full screen quad. If you need to add other things to the command buffer,
1108 * create the command buffer manually and call
1109 * gst_vulkan_full_screen_quad_prepare_draw(),
1110 * gst_vulkan_full_screen_quad_fill_command_buffer() and
1111 * gst_vulkan_full_screen_quad_submit() instead.
1112 *
1113 * Returns: whether the draw was successful
1114 *
1115 * Since: 1.18
1116 */
gboolean
gst_vulkan_full_screen_quad_draw (GstVulkanFullScreenQuad * self,
    GError ** error)
{
  GstVulkanCommandBuffer *cmd = NULL;
  GstVulkanFence *fence = NULL;
  VkResult err;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);

  /* the fence tracks when the GPU has finished with the resources used by
   * this draw; it is handed to prepare/fill/submit below */
  fence = gst_vulkan_device_create_fence (self->queue->device, error);
  if (!fence)
    goto error;

  /* lazily builds pipeline/descriptor-set/framebuffer state as needed */
  if (!gst_vulkan_full_screen_quad_prepare_draw (self, fence, error))
    goto error;

  if (!(cmd = gst_vulkan_command_pool_create (self->cmd_pool, error)))
    goto error;

  {
    VkCommandBufferBeginInfo cmd_buf_info = { 0, };

    /* *INDENT-OFF* */
    cmd_buf_info = (VkCommandBufferBeginInfo) {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        .pInheritanceInfo = NULL
    };
    /* *INDENT-ON* */

    /* the command buffer stays locked while we record into it; every exit
     * path between here and vkEndCommandBuffer must go through
     * unlock_error to release the lock */
    gst_vulkan_command_buffer_lock (cmd);
    err = vkBeginCommandBuffer (cmd->cmd, &cmd_buf_info);
    if (gst_vulkan_error_to_g_error (err, error, "vkBeginCommandBuffer") < 0)
      goto unlock_error;
  }

  if (!gst_vulkan_full_screen_quad_fill_command_buffer (self, cmd, fence,
          error))
    goto unlock_error;

  err = vkEndCommandBuffer (cmd->cmd);
  gst_vulkan_command_buffer_unlock (cmd);
  if (gst_vulkan_error_to_g_error (err, error, "vkEndCommandBuffer") < 0)
    goto error;

  /* on success, submit queues @cmd on the trash list so it is released
   * once @fence signals; we must not unref it here */
  if (!gst_vulkan_full_screen_quad_submit (self, cmd, fence, error))
    goto error;

  gst_vulkan_fence_unref (fence);

  return TRUE;

unlock_error:
  gst_vulkan_command_buffer_unlock (cmd);

error:
  gst_clear_mini_object ((GstMiniObject **) & cmd);
  gst_clear_mini_object ((GstMiniObject **) & fence);
  return FALSE;
}
1179
1180 /**
1181 * gst_vulkan_full_screen_quad_prepare_draw:
1182 * @self: the #GstVulkanFullScreenQuad
1183 * @fence: a #GstVulkanFence that will be signalled after submission
1184 * @error: a #GError filled on error
1185 *
1186 * Returns: whether the necessary information could be generated for drawing a
1187 * frame.
1188 *
1189 * Since: 1.18
1190 */
gboolean
gst_vulkan_full_screen_quad_prepare_draw (GstVulkanFullScreenQuad * self,
    GstVulkanFence * fence, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;
  GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  int i;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  priv = GET_PRIV (self);

  /* all GPU state below is created lazily and cached; the various setters
   * destroy exactly the parts they invalidate */
  if (!self->graphics_pipeline)
    if (!create_pipeline (self, error))
      return FALSE;

  if (!ensure_vertex_data (self, error))
    goto error;

  if (!self->descriptor_cache)
    if (!create_descriptor_pool (self, error))
      goto error;

  if (!self->descriptor_set) {
    /* create one image view per input plane and point the descriptor set at
     * them; each view is queued on the trash list so it is released once
     * @fence signals */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Input memory must be a GstVulkanImageMemory");
        goto error;
      }
      in_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) in_views[i]));
    }
    if (!(self->descriptor_set =
            get_and_update_descriptor_set (self, in_views, error)))
      goto error;
  }

  if (!self->framebuffer) {
    /* likewise create one view per output plane; these back the
     * framebuffer the render pass writes into */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
      GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
      if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
        g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
            "Output memory must be a GstVulkanImageMemory");
        goto error;
      }
      out_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
      gst_vulkan_trash_list_add (self->trash_list,
          gst_vulkan_trash_list_acquire (self->trash_list, fence,
              gst_vulkan_trash_mini_object_unref,
              (GstMiniObject *) out_views[i]));
    }
    if (!create_framebuffer (self, out_views, error))
      goto error;
  }

  if (!self->cmd_pool)
    if (!(self->cmd_pool =
            gst_vulkan_queue_create_command_pool (self->queue, error)))
      goto error;

  return TRUE;

error:
  /* NOTE(review): views cleared here were already handed to the trash list
   * above, which also unrefs them when @fence signals — confirm the trash
   * list takes its own reference, otherwise this is a double unref */
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & in_views[i]);
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    gst_clear_mini_object ((GstMiniObject **) & out_views[i]);
  return FALSE;
}
1267
1268 /**
1269 * gst_vulkan_full_screen_quad_fill_command_buffer:
1270 * @self: a #GstVulkanFullScreenQuad
 * @cmd: the #GstVulkanCommandBuffer to fill with commands
 * @fence: a #GstVulkanFence used to track the lifetime of resources
 *     referenced by @cmd
1272 * @error: a #GError to fill on error
1273 *
1274 * Returns: whether @cmd could be filled with the necessary commands
1275 *
1276 * Since: 1.18
1277 */
gboolean
gst_vulkan_full_screen_quad_fill_command_buffer (GstVulkanFullScreenQuad * self,
    GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
{
  GstVulkanFullScreenQuadPrivate *priv;
  GstVulkanImageView *in_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  GstVulkanImageView *out_views[GST_VIDEO_MAX_PLANES] = { NULL, };
  int i;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (cmd != NULL, FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  priv = GET_PRIV (self);

  /* collect per-plane image views for the input and output buffers; each
   * view is queued on the trash list so it stays alive until @fence
   * signals */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
    GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->inbuf, i);
    if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
          "Input memory must be a GstVulkanImageMemory");
      goto error;
    }
    in_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, fence,
            gst_vulkan_trash_mini_object_unref, (GstMiniObject *) in_views[i]));
  }
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    GstVulkanImageMemory *img_mem = peek_image_from_buffer (priv->outbuf, i);
    if (!gst_is_vulkan_image_memory ((GstMemory *) img_mem)) {
      g_set_error_literal (error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
          "Output memory must be a GstVulkanImageMemory");
      goto error;
    }
    out_views[i] = gst_vulkan_get_or_create_image_view (img_mem);
    gst_vulkan_trash_list_add (self->trash_list,
        gst_vulkan_trash_list_acquire (self->trash_list, fence,
            gst_vulkan_trash_mini_object_unref,
            (GstMiniObject *) out_views[i]));
  }

  /* transition each input plane from its last known layout to
   * SHADER_READ_ONLY_OPTIMAL so the fragment shader can read it */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->in_info); i++) {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier in_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = in_views[i]->image->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
        .oldLayout = in_views[i]->image->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = in_views[i]->image->image,
        .subresourceRange = in_views[i]->image->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd->cmd,
        in_views[i]->image->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, NULL, 0, NULL, 1,
        &in_image_memory_barrier);

    /* record the new state on the image so the next barrier on it chains
     * off the correct stage/access/layout */
    in_views[i]->image->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    in_views[i]->image->barrier.parent.access_flags =
        in_image_memory_barrier.dstAccessMask;
    in_views[i]->image->barrier.image_layout =
        in_image_memory_barrier.newLayout;
  }

  /* transition each output plane to COLOR_ATTACHMENT_OPTIMAL so the render
   * pass can write it */
  for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->out_info); i++) {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier out_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = out_views[i]->image->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .oldLayout = out_views[i]->image->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = out_views[i]->image->image,
        .subresourceRange = out_views[i]->image->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd->cmd,
        out_views[i]->image->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, NULL, 0, NULL, 1,
        &out_image_memory_barrier);

    out_views[i]->image->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    out_views[i]->image->barrier.parent.access_flags =
        out_image_memory_barrier.dstAccessMask;
    out_views[i]->image->barrier.image_layout =
        out_image_memory_barrier.newLayout;
  }

  {
    /* record the render pass: bind descriptors, pipeline and vertex/index
     * buffers, then draw the indexed quad over the whole output extent */
    /* *INDENT-OFF* */
    VkClearValue clearColor = {{{ 0.0f, 0.0f, 0.0f, 1.0f }}};
    VkClearValue clearColors[GST_VIDEO_MAX_PLANES] = {
      clearColor, clearColor, clearColor, clearColor,
    };
    VkRenderPassBeginInfo render_pass_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .renderPass = (VkRenderPass) self->render_pass->handle,
        .framebuffer = (VkFramebuffer) self->framebuffer->handle,
        .renderArea.offset = { 0, 0 },
        .renderArea.extent = {
            GST_VIDEO_INFO_WIDTH (&self->out_info),
            GST_VIDEO_INFO_HEIGHT (&self->out_info)
        },
        .clearValueCount = GST_VIDEO_INFO_N_PLANES (&self->out_info),
        .pClearValues = clearColors,
    };
    /* *INDENT-ON* */
    VkDeviceSize offsets[] = { 0 };

    vkCmdBindDescriptorSets (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
        (VkPipelineLayout) self->pipeline_layout->handle, 0, 1,
        &self->descriptor_set->set, 0, NULL);

    vkCmdBeginRenderPass (cmd->cmd, &render_pass_info,
        VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline (cmd->cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
        (VkPipeline) self->graphics_pipeline->handle);
    vkCmdBindVertexBuffers (cmd->cmd, 0, 1,
        &((GstVulkanBufferMemory *) priv->vertices)->buffer, offsets);
    vkCmdBindIndexBuffer (cmd->cmd,
        ((GstVulkanBufferMemory *) priv->indices)->buffer, 0,
        VK_INDEX_TYPE_UINT16);
    vkCmdDrawIndexed (cmd->cmd, priv->n_indices, 1, 0, 0, 0);
    vkCmdEndRenderPass (cmd->cmd);
  }

  return TRUE;

error:
  /* any views acquired before the failure are owned by the trash list and
   * will be released when @fence signals */
  return FALSE;
}
1422
1423 /**
1424 * gst_vulkan_full_screen_quad_submit:
1425 * @self: a #GstVulkanFullScreenQuad
1426 * @cmd: (transfer full): a #GstVulkanCommandBuffer to submit
1427 * @fence: a #GstVulkanFence to signal on completion
1428 * @error: a #GError to fill on error
1429 *
1430 * Returns: whether @cmd could be submitted to the queue
1431 *
1432 * Since: 1.18
1433 */
gboolean
gst_vulkan_full_screen_quad_submit (GstVulkanFullScreenQuad * self,
    GstVulkanCommandBuffer * cmd, GstVulkanFence * fence, GError ** error)
{
  VkResult err;

  g_return_val_if_fail (GST_IS_VULKAN_FULL_SCREEN_QUAD (self), FALSE);
  g_return_val_if_fail (cmd != NULL, FALSE);
  g_return_val_if_fail (fence != NULL, FALSE);

  {
    /* *INDENT-OFF* */
    VkSubmitInfo submit_info = {
        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext = NULL,
        .waitSemaphoreCount = 0,
        .pWaitSemaphores = NULL,
        .pWaitDstStageMask = NULL,
        .commandBufferCount = 1,
        .pCommandBuffers = &cmd->cmd,
        .signalSemaphoreCount = 0,
        .pSignalSemaphores = NULL,
    };
    /* *INDENT-ON* */

    /* submissions to the same queue must be serialized */
    gst_vulkan_queue_submit_lock (self->queue);
    err =
        vkQueueSubmit (self->queue->queue, 1, &submit_info,
        GST_VULKAN_FENCE_FENCE (fence));
    gst_vulkan_queue_submit_unlock (self->queue);
    if (gst_vulkan_error_to_g_error (err, error, "vkQueueSubmit") < 0)
      goto error;
  }

  /* hand ownership of @cmd to the trash list: it is unreffed once @fence
   * is signalled, i.e. when the GPU has finished executing it */
  gst_vulkan_trash_list_add (self->trash_list,
      gst_vulkan_trash_list_acquire (self->trash_list, fence,
          gst_vulkan_trash_mini_object_unref, GST_MINI_OBJECT_CAST (cmd)));

  /* opportunistically reap resources whose fences have already signalled */
  gst_vulkan_trash_list_gc (self->trash_list);

  /* remember the fence of the most recent submission so later teardown can
   * wait on it */
  if (self->last_fence)
    gst_vulkan_fence_unref (self->last_fence);
  self->last_fence = gst_vulkan_fence_ref (fence);

  return TRUE;

error:
  /* NOTE(review): @cmd is documented as (transfer full) but is not released
   * on this failure path; gst_vulkan_full_screen_quad_draw() compensates by
   * unreffing it itself — confirm ownership before changing either side */
  return FALSE;
}
1483