/*
 * GStreamer
 * Copyright (C) 2016 Matthew Waters <matthew@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstvkbuffermemory.h"

/**
 * SECTION:vkbuffermemory
 * @title: GstVulkanBufferMemory
 * @short_description: memory subclass for Vulkan buffer memory
 * @see_also: #GstVulkanMemory, #GstMemory, #GstAllocator
 *
 * #GstVulkanBufferMemory is a #GstMemory subclass providing support for the
 * mapping of Vulkan device memory.
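 *
 * A minimal usage sketch (it assumes an already set up #GstVulkanDevice
 * named `device`, which is not created in this file, and uses illustrative
 * size and flag values):
 *
 * |[<!-- language="C" -->
 * GstMemory *mem;
 * GstMapInfo map_info;
 *
 * gst_vulkan_buffer_memory_init_once ();
 *
 * mem = gst_vulkan_buffer_memory_alloc (device, 1024,
 *     VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
 *     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
 *     VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
 *
 * if (gst_memory_map (mem, &map_info, GST_MAP_WRITE)) {
 *   // fill map_info.data with up to map_info.size bytes of data
 *   gst_memory_unmap (mem, &map_info);
 * }
 *
 * gst_memory_unref (mem);
 * ]|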
 */

#define GST_CAT_DEFAULT GST_CAT_VULKAN_BUFFER_MEMORY
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);

static GstAllocator *_vulkan_buffer_memory_allocator;

static gboolean
_create_info_from_args (VkBufferCreateInfo * info, gsize size,
    VkBufferUsageFlags usage)
{
  /* FIXME: validate these */
  /* *INDENT-OFF* */
  *info = (VkBufferCreateInfo) {
      .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
      .pNext = NULL,
      .flags = 0,
      .size = size,
      .usage = usage,
      .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
      .queueFamilyIndexCount = 0,
      .pQueueFamilyIndices = NULL,
  };
  /* *INDENT-ON* */

  return TRUE;
}

static void
_vk_buffer_mem_init (GstVulkanBufferMemory * mem, GstAllocator * allocator,
    GstMemory * parent, GstVulkanDevice * device, VkBufferUsageFlags usage,
    GstAllocationParams * params, gsize size, gpointer user_data,
    GDestroyNotify notify)
{
  gsize align = gst_memory_alignment, offset = 0, maxsize = size;
  GstMemoryFlags flags = 0;

  if (params) {
    flags = params->flags;
    align |= params->align;
    offset = params->prefix;
    maxsize += params->prefix + params->padding + align;
  }

  gst_memory_init (GST_MEMORY_CAST (mem), flags, allocator, parent, maxsize,
      align, offset, size);

  mem->device = gst_object_ref (device);
  mem->usage = usage;
  mem->wrapped = FALSE;
  mem->notify = notify;
  mem->user_data = user_data;

  mem->barrier.parent.type = GST_VULKAN_BARRIER_TYPE_BUFFER;
  mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  mem->barrier.parent.access_flags = 0;

  g_mutex_init (&mem->lock);

  GST_CAT_DEBUG (GST_CAT_VULKAN_BUFFER_MEMORY,
      "new Vulkan Buffer memory:%p size:%" G_GSIZE_FORMAT, mem, maxsize);
}

static GstVulkanBufferMemory *
_vk_buffer_mem_new_alloc (GstAllocator * allocator, GstMemory * parent,
    GstVulkanDevice * device, gsize size, VkBufferUsageFlags usage,
    VkMemoryPropertyFlags mem_prop_flags, gpointer user_data,
    GDestroyNotify notify)
{
  GstVulkanBufferMemory *mem = NULL;
  GstAllocationParams params = { 0, };
  VkBufferCreateInfo buffer_info;
  GError *error = NULL;
  guint32 type_idx;
  VkBuffer buffer;
  VkResult err;

  if (!_create_info_from_args (&buffer_info, size, usage)) {
    GST_CAT_ERROR (GST_CAT_VULKAN_BUFFER_MEMORY, "Incorrect buffer parameters");
    goto error;
  }

  err = vkCreateBuffer (device->device, &buffer_info, NULL, &buffer);
  if (gst_vulkan_error_to_g_error (err, &error, "vkCreateBuffer") < 0)
    goto vk_error;

  mem = g_new0 (GstVulkanBufferMemory, 1);
  vkGetBufferMemoryRequirements (device->device, buffer, &mem->requirements);

  if ((mem->requirements.alignment & (mem->requirements.alignment - 1)) != 0) {
    g_set_error_literal (&error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
        "Vulkan implementation requires unsupported non-power-of-two memory alignment");
    goto vk_error;
  }

  params.align = mem->requirements.alignment - 1;
  _vk_buffer_mem_init (mem, allocator, parent, device, usage, &params, size,
      user_data, notify);
  mem->buffer = buffer;

  if (!gst_vulkan_memory_find_memory_type_index_with_type_properties (device,
          mem->requirements.memoryTypeBits, mem_prop_flags, &type_idx))
    goto error;

  mem->vk_mem = (GstVulkanMemory *) gst_vulkan_memory_alloc (device, type_idx,
      &params, mem->requirements.size, mem_prop_flags);
  if (!mem->vk_mem)
    goto error;

  err = vkBindBufferMemory (device->device, buffer, mem->vk_mem->mem_ptr, 0);
  if (gst_vulkan_error_to_g_error (err, &error, "vkBindBufferMemory") < 0)
    goto vk_error;

  return mem;

vk_error:
  {
    GST_CAT_ERROR (GST_CAT_VULKAN_BUFFER_MEMORY,
        "Failed to allocate buffer memory %s", error->message);
    g_clear_error (&error);
    goto error;
  }

error:
  {
    if (mem)
      gst_memory_unref ((GstMemory *) mem);
    return NULL;
  }
}

static GstVulkanBufferMemory *
_vk_buffer_mem_new_wrapped (GstAllocator * allocator, GstMemory * parent,
    GstVulkanDevice * device, VkBuffer buffer, VkBufferUsageFlags usage,
    gpointer user_data, GDestroyNotify notify)
{
  GstVulkanBufferMemory *mem = g_new0 (GstVulkanBufferMemory, 1);
  GstAllocationParams params = { 0, };

  mem->buffer = buffer;

  vkGetBufferMemoryRequirements (device->device, mem->buffer,
      &mem->requirements);

  params.align = mem->requirements.alignment - 1;
  params.flags = GST_MEMORY_FLAG_NOT_MAPPABLE;
  _vk_buffer_mem_init (mem, allocator, parent, device, usage, &params,
      mem->requirements.size, user_data, notify);
  mem->wrapped = TRUE;

  return mem;
}

static gpointer
_vk_buffer_mem_map_full (GstVulkanBufferMemory * mem, GstMapInfo * info,
    gsize size)
{
  GstMapInfo *vk_map_info;

  /* FIXME: possible barrier needed */
  g_mutex_lock (&mem->lock);

  if (!mem->vk_mem) {
    g_mutex_unlock (&mem->lock);
    return NULL;
  }

  vk_map_info = g_new0 (GstMapInfo, 1);
  info->user_data[0] = vk_map_info;
  if (!gst_memory_map ((GstMemory *) mem->vk_mem, vk_map_info, info->flags)) {
    g_free (vk_map_info);
    g_mutex_unlock (&mem->lock);
    return NULL;
  }
  g_mutex_unlock (&mem->lock);

  return vk_map_info->data;
}

static void
_vk_buffer_mem_unmap_full (GstVulkanBufferMemory * mem, GstMapInfo * info)
{
  g_mutex_lock (&mem->lock);
  gst_memory_unmap ((GstMemory *) mem->vk_mem, info->user_data[0]);
  g_mutex_unlock (&mem->lock);

  g_free (info->user_data[0]);
}

static GstMemory *
_vk_buffer_mem_copy (GstVulkanBufferMemory * src, gssize offset, gssize size)
{
  return NULL;
}

static GstMemory *
_vk_buffer_mem_share (GstVulkanBufferMemory * mem, gssize offset, gssize size)
{
  return NULL;
}

static gboolean
_vk_buffer_mem_is_span (GstVulkanBufferMemory * mem1,
    GstVulkanBufferMemory * mem2, gsize * offset)
{
  return FALSE;
}

static GstMemory *
_vk_buffer_mem_alloc (GstAllocator * allocator, gsize size,
    GstAllocationParams * params)
{
  g_critical ("Subclass should override GstAllocatorClass::alloc() function");

  return NULL;
}

static void
_vk_buffer_mem_free (GstAllocator * allocator, GstMemory * memory)
{
  GstVulkanBufferMemory *mem = (GstVulkanBufferMemory *) memory;

  GST_CAT_TRACE (GST_CAT_VULKAN_BUFFER_MEMORY, "freeing buffer memory:%p "
      "id:%" G_GUINT64_FORMAT, mem, (guint64) mem->buffer);

  if (mem->buffer && !mem->wrapped)
    vkDestroyBuffer (mem->device->device, mem->buffer, NULL);

  if (mem->vk_mem)
    gst_memory_unref ((GstMemory *) mem->vk_mem);

  if (mem->notify)
    mem->notify (mem->user_data);

  gst_object_unref (mem->device);

  g_free (mem);
}

/**
 * gst_vulkan_buffer_memory_alloc:
 * @device: a #GstVulkanDevice
 * @size: size of the new buffer
 * @usage: buffer usage flags
 * @mem_prop_flags: memory properties flags for the backing memory
 *
 * Allocate a new #GstVulkanBufferMemory.
 *
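 * A minimal allocation sketch. The size and flag values are only
 * illustrative, and `device` is assumed to be an existing #GstVulkanDevice:
 *
 * |[<!-- language="C" -->
 * GstMemory *mem;
 *
 * gst_vulkan_buffer_memory_init_once ();
 *
 * mem = gst_vulkan_buffer_memory_alloc (device, 4096,
 *     VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
 *     VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
 * if (mem == NULL) {
 *   g_warning ("failed to allocate a vulkan buffer memory");
 * } else {
 *   // ... use the buffer, e.g. record it into a command buffer ...
 *   gst_memory_unref (mem);
 * }
 * ]|
 *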
 * Returns: (transfer full): a #GstMemory object backed by a Vulkan buffer
 * bound to Vulkan device memory
 *
 * Since: 1.18
 */
GstMemory *
gst_vulkan_buffer_memory_alloc (GstVulkanDevice * device, gsize size,
    VkBufferUsageFlags usage, VkMemoryPropertyFlags mem_prop_flags)
{
  GstVulkanBufferMemory *mem;

  mem = _vk_buffer_mem_new_alloc (_vulkan_buffer_memory_allocator, NULL, device,
      size, usage, mem_prop_flags, NULL, NULL);

  return (GstMemory *) mem;
}

/**
 * gst_vulkan_buffer_memory_wrapped:
 * @device: a #GstVulkanDevice
 * @buffer: a `VkBuffer`
 * @usage: usage flags of @buffer
 * @user_data: (allow-none): user data to call @notify with
 * @notify: (allow-none): a #GDestroyNotify called when @buffer is no longer in use
 *
 * Allocate a new wrapped #GstVulkanBufferMemory with @buffer.
 *
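 * A sketch of wrapping an externally created `VkBuffer`. The creation of
 * `external_buffer`, the `wrapper_data` pointer and the
 * `release_wrapper_data` callback are assumptions made for the example and
 * are not part of this API:
 *
 * |[<!-- language="C" -->
 * GstMemory *mem;
 *
 * mem = gst_vulkan_buffer_memory_wrapped (device, external_buffer,
 *     VK_BUFFER_USAGE_TRANSFER_SRC_BIT, wrapper_data, release_wrapper_data);
 *
 * // the wrapped memory is flagged GST_MEMORY_FLAG_NOT_MAPPABLE; use it with
 * // Vulkan commands and unref it once it is no longer needed
 * gst_memory_unref (mem);
 * ]|
 *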
 * Returns: (transfer full): a #GstMemory object wrapping the Vulkan buffer
 * @buffer
 *
 * Since: 1.18
 */
GstMemory *
gst_vulkan_buffer_memory_wrapped (GstVulkanDevice * device, VkBuffer buffer,
    VkBufferUsageFlags usage, gpointer user_data, GDestroyNotify notify)
{
  GstVulkanBufferMemory *mem;

  mem =
      _vk_buffer_mem_new_wrapped (_vulkan_buffer_memory_allocator, NULL, device,
      buffer, usage, user_data, notify);

  return (GstMemory *) mem;
}

G_DEFINE_TYPE (GstVulkanBufferMemoryAllocator,
    gst_vulkan_buffer_memory_allocator, GST_TYPE_ALLOCATOR);

static void
gst_vulkan_buffer_memory_allocator_class_init
    (GstVulkanBufferMemoryAllocatorClass * klass)
{
  GstAllocatorClass *allocator_class = (GstAllocatorClass *) klass;

  allocator_class->alloc = _vk_buffer_mem_alloc;
  allocator_class->free = _vk_buffer_mem_free;
}

static void
gst_vulkan_buffer_memory_allocator_init (GstVulkanBufferMemoryAllocator *
    allocator)
{
  GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);

  alloc->mem_type = GST_VULKAN_BUFFER_MEMORY_ALLOCATOR_NAME;
  alloc->mem_map_full = (GstMemoryMapFullFunction) _vk_buffer_mem_map_full;
  alloc->mem_unmap_full =
      (GstMemoryUnmapFullFunction) _vk_buffer_mem_unmap_full;
  alloc->mem_copy = (GstMemoryCopyFunction) _vk_buffer_mem_copy;
  alloc->mem_share = (GstMemoryShareFunction) _vk_buffer_mem_share;
  alloc->mem_is_span = (GstMemoryIsSpanFunction) _vk_buffer_mem_is_span;
}

/**
 * gst_vulkan_buffer_memory_init_once:
 *
 * Initializes the Vulkan buffer memory allocator. It is safe to call this
 * function multiple times. This must be called before any other
 * #GstVulkanBufferMemory operation.
 *
 * Since: 1.18
 */
void
gst_vulkan_buffer_memory_init_once (void)
{
  static gsize _init = 0;

  if (g_once_init_enter (&_init)) {
    GST_DEBUG_CATEGORY_INIT (GST_CAT_VULKAN_BUFFER_MEMORY, "vulkanbuffermemory",
        0, "Vulkan Buffer Memory");

    _vulkan_buffer_memory_allocator =
        g_object_new (gst_vulkan_buffer_memory_allocator_get_type (), NULL);
    gst_object_ref_sink (_vulkan_buffer_memory_allocator);

    gst_allocator_register (GST_VULKAN_BUFFER_MEMORY_ALLOCATOR_NAME,
        gst_object_ref (_vulkan_buffer_memory_allocator));
    g_once_init_leave (&_init, 1);
  }
}

/**
 * gst_is_vulkan_buffer_memory:
 * @mem: a #GstMemory
 *
 * Checks whether @mem was allocated by a #GstVulkanBufferMemoryAllocator.
 *
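 * A small sketch of guarding access to buffer-specific fields, where `mem` is
 * any #GstMemory obtained elsewhere:
 *
 * |[<!-- language="C" -->
 * if (gst_is_vulkan_buffer_memory (mem)) {
 *   GstVulkanBufferMemory *buf_mem = (GstVulkanBufferMemory *) mem;
 *
 *   // buf_mem->buffer is the underlying VkBuffer handle
 * }
 * ]|
 *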
 * Returns: whether the memory at @mem is a #GstVulkanBufferMemory
 *
 * Since: 1.18
 */
gboolean
gst_is_vulkan_buffer_memory (GstMemory * mem)
{
  return mem != NULL && mem->allocator != NULL &&
      g_type_is_a (G_OBJECT_TYPE (mem->allocator),
      GST_TYPE_VULKAN_BUFFER_MEMORY_ALLOCATOR);
}