/* GStreamer
 * Copyright (C) 2020 Igalia, S.L.
 * Author: Víctor Jáquez <vjaquez@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstvaallocator.h"

#include <sys/types.h>
#include <unistd.h>

#include "gstvacaps.h"
#include "gstvasurfacecopy.h"
#include "gstvavideoformat.h"
#include "vasurfaceimage.h"

#define GST_CAT_DEFAULT gst_va_memory_debug
GST_DEBUG_CATEGORY (gst_va_memory_debug);

static void
_init_debug_category (void)
{
#ifndef GST_DISABLE_GST_DEBUG
  static gsize _init = 0;

  if (g_once_init_enter (&_init)) {
    GST_DEBUG_CATEGORY_INIT (gst_va_memory_debug, "vamemory", 0, "VA memory");
    g_once_init_leave (&_init, 1);
  }
#endif
}

/*=========================== Quarks for GstMemory ===========================*/

static GQuark
gst_va_buffer_surface_quark (void)
{
  static gsize surface_quark = 0;

  if (g_once_init_enter (&surface_quark)) {
    GQuark quark = g_quark_from_string ("GstVaBufferSurface");
    g_once_init_leave (&surface_quark, quark);
  }

  return surface_quark;
}

static GQuark
gst_va_drm_mod_quark (void)
{
  static gsize drm_mod_quark = 0;

  if (g_once_init_enter (&drm_mod_quark)) {
    GQuark quark = g_quark_from_string ("DRMModifier");
    g_once_init_leave (&drm_mod_quark, quark);
  }

  return drm_mod_quark;
}

static GQuark
gst_va_buffer_aux_surface_quark (void)
{
  static gsize surface_quark = 0;

  if (g_once_init_enter (&surface_quark)) {
    GQuark quark = g_quark_from_string ("GstVaBufferAuxSurface");
    g_once_init_leave (&surface_quark, quark);
  }

  return surface_quark;
}
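
/* NOTE: these quarks key the per-GstMemory qdata used below: the
 * GstVaBufferSurface holding the memory's VA surface, the DRM format
 * modifier of an exported dmabuf, and the auxiliary surface attached
 * by gst_va_buffer_create_aux_surface (). */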

/*========================= GstVaBufferSurface ===============================*/

typedef struct _GstVaBufferSurface GstVaBufferSurface;
struct _GstVaBufferSurface
{
  GstVaDisplay *display;
  VASurfaceID surface;
  guint n_mems;
  GstMemory *mems[GST_VIDEO_MAX_PLANES];
  gint ref_count;
  gint ref_mems_count;
};
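
/* @ref_count guards the VA surface itself: when it drops to zero the
 * surface is destroyed (see gst_va_buffer_surface_unref () and the
 * pool flush). @ref_mems_count, used by the dmabuf allocator, counts
 * the memories of this surface currently held by a GstBuffer; when it
 * drops to zero the memories are pushed back into the allocator's
 * pool (see gst_va_dmabuf_memory_release ()). */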

static void
gst_va_buffer_surface_unref (gpointer data)
{
  GstVaBufferSurface *buf = data;

  g_return_if_fail (buf && GST_IS_VA_DISPLAY (buf->display));

  if (g_atomic_int_dec_and_test (&buf->ref_count)) {
    GST_LOG_OBJECT (buf->display, "Destroying surface %#x", buf->surface);
    va_destroy_surfaces (buf->display, &buf->surface, 1);
    gst_clear_object (&buf->display);
    g_slice_free (GstVaBufferSurface, buf);
  }
}

static GstVaBufferSurface *
gst_va_buffer_surface_new (VASurfaceID surface, GstVideoFormat format,
    gint width, gint height)
{
  GstVaBufferSurface *buf = g_slice_new (GstVaBufferSurface);

  g_atomic_int_set (&buf->ref_count, 0);
  g_atomic_int_set (&buf->ref_mems_count, 0);
  buf->surface = surface;
  buf->display = NULL;
  buf->n_mems = 0;

  return buf;
}

/*=========================== GstVaMemoryPool ================================*/

/* queue for disposed surfaces */
typedef struct _GstVaMemoryPool GstVaMemoryPool;
struct _GstVaMemoryPool
{
  GstAtomicQueue *queue;
  gint surface_count;

  GMutex lock;
};

#define GST_VA_MEMORY_POOL_CAST(obj) ((GstVaMemoryPool *)obj)
#define GST_VA_MEMORY_POOL_LOCK(obj) g_mutex_lock (&GST_VA_MEMORY_POOL_CAST(obj)->lock)
#define GST_VA_MEMORY_POOL_UNLOCK(obj) g_mutex_unlock (&GST_VA_MEMORY_POOL_CAST(obj)->lock)
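
/* Released memories land in @queue and are recycled by the
 * allocators' prepare_buffer () methods. @surface_count tracks how
 * many surfaces this allocator created; it is only decremented when
 * the pool is flushed, which is how surface leaks are detected at
 * dispose time. */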

static void
gst_va_memory_pool_init (GstVaMemoryPool * self)
{
  self->queue = gst_atomic_queue_new (2);

  g_mutex_init (&self->lock);

  self->surface_count = 0;
}

static void
gst_va_memory_pool_finalize (GstVaMemoryPool * self)
{
  g_mutex_clear (&self->lock);

  gst_atomic_queue_unref (self->queue);
}

static void
gst_va_memory_pool_flush_unlocked (GstVaMemoryPool * self,
    GstVaDisplay * display)
{
  GstMemory *mem;
  GstVaBufferSurface *buf;

  while ((mem = gst_atomic_queue_pop (self->queue))) {
    /* destroy the surface */
    buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
        gst_va_buffer_surface_quark ());
    if (buf) {
      if (g_atomic_int_dec_and_test (&buf->ref_count)) {
        GST_LOG ("Destroying surface %#x", buf->surface);
        va_destroy_surfaces (display, &buf->surface, 1);
        self->surface_count -= 1;       /* GstVaDmabufAllocator */
        g_slice_free (GstVaBufferSurface, buf);
      }
    } else {
      self->surface_count -= 1; /* GstVaAllocator */
    }

    GST_MINI_OBJECT_CAST (mem)->dispose = NULL;
    /* when a memory is pushed into the available queue its allocator
     * is unreffed, so we have to ref the allocator here because the
     * memory's finalize will unref it again */
    gst_object_ref (mem->allocator);
    gst_memory_unref (mem);
  }
}

static void
gst_va_memory_pool_flush (GstVaMemoryPool * self, GstVaDisplay * display)
{
  GST_VA_MEMORY_POOL_LOCK (self);
  gst_va_memory_pool_flush_unlocked (self, display);
  GST_VA_MEMORY_POOL_UNLOCK (self);
}

static inline void
gst_va_memory_pool_push (GstVaMemoryPool * self, GstMemory * mem)
{
  gst_atomic_queue_push (self->queue, gst_memory_ref (mem));
}

static inline GstMemory *
gst_va_memory_pool_pop (GstVaMemoryPool * self)
{
  return gst_atomic_queue_pop (self->queue);
}

static inline GstMemory *
gst_va_memory_pool_peek (GstVaMemoryPool * self)
{
  return gst_atomic_queue_peek (self->queue);
}

static inline guint
gst_va_memory_pool_surface_count (GstVaMemoryPool * self)
{
  return g_atomic_int_get (&self->surface_count);
}

static inline void
gst_va_memory_pool_surface_inc (GstVaMemoryPool * self)
{
  g_atomic_int_inc (&self->surface_count);
}

/*=========================== GstVaDmabufAllocator ===========================*/

struct _GstVaDmabufAllocator
{
  GstDmaBufAllocator parent;

  GstVaDisplay *display;

  GstMemoryMapFunction parent_map;
  GstMemoryCopyFunction parent_copy;

  GstVideoInfo info;
  guint usage_hint;

  GstVaSurfaceCopy *copy;

  GstVaMemoryPool pool;
};

#define gst_va_dmabuf_allocator_parent_class dmabuf_parent_class
G_DEFINE_TYPE_WITH_CODE (GstVaDmabufAllocator, gst_va_dmabuf_allocator,
    GST_TYPE_DMABUF_ALLOCATOR, _init_debug_category ());

static GstVaSurfaceCopy *
_ensure_surface_copy (GstVaSurfaceCopy ** old, GstVaDisplay * display,
    GstVideoInfo * info)
{
  GstVaSurfaceCopy *surface_copy;

  surface_copy = g_atomic_pointer_get (old);
  if (!surface_copy) {
    surface_copy = gst_va_surface_copy_new (display, info);

    /* another thread might have created one and set it before us */
    if (surface_copy &&
        !g_atomic_pointer_compare_and_exchange (old, NULL, surface_copy)) {
      gst_va_surface_copy_free (surface_copy);
      surface_copy = g_atomic_pointer_get (old);
    }
  }

  return surface_copy;
}

/* If a buffer contains multiple memories (dmabuf objects) it's very
 * difficult to provide a reliable way to fast-copy single memories:
 * while VA-API sees surfaces with dependent dmabufs, GStreamer only
 * copies dmabufs in isolation; trying to solve it while keeping a
 * reference of the copied buffer and dmabuf index is very fragile. */
static GstMemory *
gst_va_dmabuf_mem_copy (GstMemory * gmem, gssize offset, gssize size)
{
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (gmem->allocator);
  GstVaBufferSurface *buf;
  guint64 *drm_mod;
  gsize mem_size;

  buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (gmem),
      gst_va_buffer_surface_quark ());

  drm_mod = gst_mini_object_get_qdata (GST_MINI_OBJECT (gmem),
      gst_va_drm_mod_quark ());

  /* 0 is DRM_FORMAT_MOD_LINEAR; we don't include its header here. */
  if (buf->n_mems > 1 && *drm_mod != 0) {
    GST_ERROR_OBJECT (self, "Failed to copy multi-dmabuf memory because of "
        "non-linear modifier: %#lx.", *drm_mod);
    return NULL;
  }

  /* check if it's a full memory copy */
  mem_size = gst_memory_get_sizes (gmem, NULL, NULL);

  if (size == -1)
    size = mem_size > offset ? mem_size - offset : 0;

  /* XXX: only single-memory buffers can be copied this way */
  if (offset == 0 && size == mem_size && buf->n_mems == 1) {
    GstVaBufferSurface *buf_copy = NULL;
    GstMemory *copy;
    GstVaSurfaceCopy *copy_func;

    GST_VA_MEMORY_POOL_LOCK (&self->pool);
    copy = gst_va_memory_pool_pop (&self->pool);
    GST_VA_MEMORY_POOL_UNLOCK (&self->pool);

    if (copy) {
      gst_object_ref (copy->allocator);

      buf_copy = gst_mini_object_get_qdata (GST_MINI_OBJECT (copy),
          gst_va_buffer_surface_quark ());

      g_assert (g_atomic_int_get (&buf_copy->ref_mems_count) == 0);

      g_atomic_int_add (&buf_copy->ref_mems_count, 1);
    } else {
      GstBuffer *buffer = gst_buffer_new ();

      if (!gst_va_dmabuf_allocator_setup_buffer (gmem->allocator, buffer)) {
        GST_WARNING_OBJECT (self, "Failed to create a new dmabuf memory");
        return NULL;
      }

      copy = gst_buffer_get_memory (buffer, 0);
      gst_buffer_unref (buffer);

      buf_copy = gst_mini_object_get_qdata (GST_MINI_OBJECT (copy),
          gst_va_buffer_surface_quark ());
    }

    g_assert (buf_copy->n_mems == 1);

    copy_func = _ensure_surface_copy (&self->copy, self->display, &self->info);
    if (copy_func && gst_va_surface_copy (copy_func, buf_copy->surface,
            buf->surface))
      return copy;

    gst_memory_unref (copy);

    /* try system memory */
  }

  if (*drm_mod != 0) {
    GST_ERROR_OBJECT (self, "Failed to copy dmabuf because of non-linear "
        "modifier: %#lx.", *drm_mod);
    return NULL;
  }

  /* fallback to system memory */
  return self->parent_copy (gmem, offset, size);
}

static gpointer
gst_va_dmabuf_mem_map (GstMemory * gmem, gsize maxsize, GstMapFlags flags)
{
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (gmem->allocator);
  VASurfaceID surface = gst_va_memory_get_surface (gmem);
  guint64 *drm_mod;

  drm_mod = gst_mini_object_get_qdata (GST_MINI_OBJECT (gmem),
      gst_va_drm_mod_quark ());

  /* 0 is DRM_FORMAT_MOD_LINEAR; we don't include its header here. */
  if (*drm_mod != 0) {
    GST_ERROR_OBJECT (self, "Failed to map the dmabuf because its modifier "
        "is %#lx, which is not linear.", *drm_mod);
    return NULL;
  }

  va_sync_surface (self->display, surface);

  return self->parent_map (gmem, maxsize, flags);
}

static void
gst_va_dmabuf_allocator_finalize (GObject * object)
{
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (object);

  g_clear_pointer (&self->copy, gst_va_surface_copy_free);
  gst_va_memory_pool_finalize (&self->pool);
  gst_clear_object (&self->display);

  G_OBJECT_CLASS (dmabuf_parent_class)->finalize (object);
}

static void
gst_va_dmabuf_allocator_dispose (GObject * object)
{
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (object);

  gst_va_memory_pool_flush_unlocked (&self->pool, self->display);
  if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
    GST_WARNING_OBJECT (self, "Surfaces leaked: %d",
        gst_va_memory_pool_surface_count (&self->pool));
  }

  G_OBJECT_CLASS (dmabuf_parent_class)->dispose (object);
}

static void
gst_va_dmabuf_allocator_class_init (GstVaDmabufAllocatorClass * klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->dispose = gst_va_dmabuf_allocator_dispose;
  object_class->finalize = gst_va_dmabuf_allocator_finalize;
}

static void
gst_va_dmabuf_allocator_init (GstVaDmabufAllocator * self)
{
  GstAllocator *allocator = GST_ALLOCATOR (self);

  self->parent_map = allocator->mem_map;
  allocator->mem_map = gst_va_dmabuf_mem_map;
  self->parent_copy = allocator->mem_copy;
  allocator->mem_copy = gst_va_dmabuf_mem_copy;

  gst_va_memory_pool_init (&self->pool);
}

GstAllocator *
gst_va_dmabuf_allocator_new (GstVaDisplay * display)
{
  GstVaDmabufAllocator *self;

  g_return_val_if_fail (GST_IS_VA_DISPLAY (display), NULL);

  self = g_object_new (GST_TYPE_VA_DMABUF_ALLOCATOR, NULL);
  self->display = gst_object_ref (display);
  gst_object_ref_sink (self);

  return GST_ALLOCATOR (self);
}
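
/* Typical usage, as a rough sketch (variable names and the format
 * values are illustrative, not part of this file's contract):
 *
 *   GstAllocator *allocator = gst_va_dmabuf_allocator_new (display);
 *   GstVideoInfo info;           // from the negotiated caps
 *
 *   gst_video_info_set_format (&info, GST_VIDEO_FORMAT_NV12, 1920, 1080);
 *   if (gst_va_dmabuf_allocator_set_format (allocator, &info, usage_hint))
 *     gst_va_dmabuf_allocator_setup_buffer (allocator, buffer);
 */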

static inline goffset
_get_fd_size (gint fd)
{
  return lseek (fd, 0, SEEK_END);
}

static gboolean
gst_va_dmabuf_memory_release (GstMiniObject * mini_object)
{
  GstMemory *mem = GST_MEMORY_CAST (mini_object);
  GstVaBufferSurface *buf;
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (mem->allocator);
  guint i;

  buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
      gst_va_buffer_surface_quark ());
  if (!buf)
    return TRUE;                /* free this unknown buffer */

  /* if this is the last reference to the GstVaBufferSurface, iterate
   * over its array of memories and push them into the queue, in a
   * thread-safe manner. */
  GST_VA_MEMORY_POOL_LOCK (&self->pool);
  if (g_atomic_int_dec_and_test (&buf->ref_mems_count)) {
    for (i = 0; i < buf->n_mems; i++) {
      GST_LOG_OBJECT (self, "releasing %p: dmabuf %d, va surface %#x",
          buf->mems[i], gst_dmabuf_memory_get_fd (buf->mems[i]), buf->surface);
      gst_va_memory_pool_push (&self->pool, buf->mems[i]);
    }
  }
  GST_VA_MEMORY_POOL_UNLOCK (&self->pool);

  /* note: if ref_mems_count doesn't reach zero, that memory will
   * "float" until it's pushed back into the pool by the last va
   * buffer surface ref */

  /* Keep last in case we are holding on the last allocator ref */
  gst_object_unref (mem->allocator);

  /* don't call mini_object's free */
  return FALSE;
}

/* Creates an exported VASurface and attaches it, as qdata, to
 * @buffer's memories.
 *
 * If @info is not NULL, a dummy (non-pooled) buffer is created to
 * update offsets and strides, and it has to be unrefed immediately.
 */
static gboolean
gst_va_dmabuf_allocator_setup_buffer_full (GstAllocator * allocator,
    GstBuffer * buffer, GstVideoInfo * info)
{
  GstVaBufferSurface *buf;
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
  GstVideoFormat format;
  VADRMPRIMESurfaceDescriptor desc = { 0, };
  VASurfaceAttribExternalBuffers *extbuf = NULL, ext_buf;
  VASurfaceID surface;
  guint32 i, fourcc, rt_format, export_flags;
  GDestroyNotify buffer_destroy = NULL;

  g_return_val_if_fail (GST_IS_VA_DMABUF_ALLOCATOR (allocator), FALSE);

  format = GST_VIDEO_INFO_FORMAT (&self->info);
  fourcc = gst_va_fourcc_from_video_format (format);
  rt_format = gst_va_chroma_from_video_format (format);
  if (fourcc == 0 || rt_format == 0) {
    GST_ERROR_OBJECT (allocator, "Unsupported format: %s",
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->info)));
    return FALSE;
  }

  /* HACK(victor): disable tiling for i965 driver for RGB formats */
  if (gst_va_display_is_implementation (self->display,
          GST_VA_IMPLEMENTATION_INTEL_I965)
      && GST_VIDEO_INFO_IS_RGB (&self->info)) {
    /* *INDENT-OFF* */
    ext_buf = (VASurfaceAttribExternalBuffers) {
      .width = GST_VIDEO_INFO_WIDTH (&self->info),
      .height = GST_VIDEO_INFO_HEIGHT (&self->info),
      .num_planes = GST_VIDEO_INFO_N_PLANES (&self->info),
      .pixel_format = fourcc,
    };
    /* *INDENT-ON* */

    extbuf = &ext_buf;
  }

  if (!va_create_surfaces (self->display, rt_format, fourcc,
          GST_VIDEO_INFO_WIDTH (&self->info),
          GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, extbuf,
          &surface, 1))
    return FALSE;

  /* workaround for missing layered dmabuf formats in i965 */
  if (gst_va_display_is_implementation (self->display,
          GST_VA_IMPLEMENTATION_INTEL_I965)
      && (fourcc == VA_FOURCC_YUY2 || fourcc == VA_FOURCC_UYVY)) {
    /* These are not representable as separate planes */
    export_flags = VA_EXPORT_SURFACE_COMPOSED_LAYERS;
  } else {
    /* Each layer will contain exactly one plane. For example, an NV12
     * surface will be exported as two layers */
    export_flags = VA_EXPORT_SURFACE_SEPARATE_LAYERS;
  }

  export_flags |= VA_EXPORT_SURFACE_READ_WRITE;

  if (!va_export_surface_to_dmabuf (self->display, surface, export_flags,
          &desc))
    goto failed;

  g_assert (GST_VIDEO_INFO_N_PLANES (&self->info) == desc.num_layers);

  if (fourcc != desc.fourcc) {
    GST_ERROR ("Unsupported fourcc: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (desc.fourcc));
    goto failed;
  }

  if (desc.num_objects == 0) {
    GST_ERROR ("Failed to export surface to dmabuf");
    goto failed;
  }

  buf = gst_va_buffer_surface_new (surface, format, desc.width, desc.height);
  if (G_UNLIKELY (info)) {
    *info = self->info;
    GST_VIDEO_INFO_SIZE (info) = 0;
  }

  buf->n_mems = desc.num_objects;

  for (i = 0; i < desc.num_objects; i++) {
    gint fd = desc.objects[i].fd;
    gsize size = desc.objects[i].size > 0 ?
        desc.objects[i].size : _get_fd_size (fd);
    GstMemory *mem = gst_dmabuf_allocator_alloc (allocator, fd, size);
    guint64 *drm_mod = g_new (guint64, 1);

    gst_buffer_append_memory (buffer, mem);
    buf->mems[i] = mem;

    if (G_LIKELY (!info)) {
      GST_MINI_OBJECT (mem)->dispose = gst_va_dmabuf_memory_release;
      g_atomic_int_add (&buf->ref_mems_count, 1);
    } else {
      /* if no @info, surface will be destroyed as soon as buffer is
       * destroyed (e.g. gst_va_dmabuf_allocator_try()) */
      buf->display = gst_object_ref (self->display);
      buffer_destroy = gst_va_buffer_surface_unref;
    }

    g_atomic_int_add (&buf->ref_count, 1);
    gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
        gst_va_buffer_surface_quark (), buf, buffer_destroy);

    *drm_mod = desc.objects[i].drm_format_modifier;
    gst_mini_object_set_qdata (GST_MINI_OBJECT (mem), gst_va_drm_mod_quark (),
        drm_mod, g_free);

    if (G_UNLIKELY (info))
      GST_VIDEO_INFO_SIZE (info) += size;

    GST_LOG_OBJECT (self, "buffer %p: new dmabuf %d / surface %#x [%dx%d] "
        "size %" G_GSIZE_FORMAT " drm mod %#lx", buffer, fd, surface,
        GST_VIDEO_INFO_WIDTH (&self->info), GST_VIDEO_INFO_HEIGHT (&self->info),
        GST_VIDEO_INFO_SIZE (&self->info), *drm_mod);
  }

  if (G_UNLIKELY (info)) {
    for (i = 0; i < desc.num_layers; i++) {
      g_assert (desc.layers[i].num_planes == 1);
      GST_VIDEO_INFO_PLANE_OFFSET (info, i) = desc.layers[i].offset[0];
      GST_VIDEO_INFO_PLANE_STRIDE (info, i) = desc.layers[i].pitch[0];
    }
  } else {
    gst_va_memory_pool_surface_inc (&self->pool);
  }

  return TRUE;

failed:
  {
    va_destroy_surfaces (self->display, &surface, 1);
    return FALSE;
  }
}

gboolean
gst_va_dmabuf_allocator_setup_buffer (GstAllocator * allocator,
    GstBuffer * buffer)
{
  return gst_va_dmabuf_allocator_setup_buffer_full (allocator, buffer, NULL);
}

static VASurfaceID
gst_va_dmabuf_allocator_prepare_buffer_unlocked (GstVaDmabufAllocator * self,
    GstBuffer * buffer)
{
  GstMemory *mems[GST_VIDEO_MAX_PLANES] = { 0, };
  GstVaBufferSurface *buf;
  gint i, j, idx;

  mems[0] = gst_va_memory_pool_pop (&self->pool);
  if (!mems[0])
    return VA_INVALID_ID;

  buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mems[0]),
      gst_va_buffer_surface_quark ());
  if (!buf)
    return VA_INVALID_ID;

  if (buf->surface == VA_INVALID_ID)
    return VA_INVALID_ID;

  for (idx = 1; idx < buf->n_mems; idx++) {
    /* grab next memory from queue */
    {
      GstMemory *mem;
      GstVaBufferSurface *pbuf;

      mem = gst_va_memory_pool_peek (&self->pool);
      if (!mem)
        return VA_INVALID_ID;

      pbuf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
          gst_va_buffer_surface_quark ());
      if (!pbuf)
        return VA_INVALID_ID;

      if (pbuf->surface != buf->surface) {
        GST_WARNING_OBJECT (self,
            "expecting memory with surface %#x but got %#x: "
            "possible memory interweaving", buf->surface, pbuf->surface);
        return VA_INVALID_ID;
      }
    }

    mems[idx] = gst_va_memory_pool_pop (&self->pool);
  }

  /* append memories */
  for (i = 0; i < buf->n_mems; i++) {
    gboolean found = FALSE;

    /* find next memory to append */
    for (j = 0; j < idx; j++) {
      if (buf->mems[i] == mems[j]) {
        found = TRUE;
        break;
      }
    }

    /* if not found, free all the popped memories and bail */
    if (!found) {
      if (!buf->display)
        buf->display = gst_object_ref (self->display);
      for (j = 0; j < idx; j++) {
        gst_object_ref (buf->mems[j]->allocator);
        GST_MINI_OBJECT (mems[j])->dispose = NULL;
        gst_memory_unref (mems[j]);
      }
      return VA_INVALID_ID;
    }

    g_atomic_int_add (&buf->ref_mems_count, 1);
    gst_object_ref (buf->mems[i]->allocator);
    gst_buffer_append_memory (buffer, buf->mems[i]);

    GST_LOG ("buffer %p: memory %p - dmabuf %d / surface %#x", buffer,
        buf->mems[i], gst_dmabuf_memory_get_fd (buf->mems[i]),
        gst_va_memory_get_surface (buf->mems[i]));
  }

  return buf->surface;
}

gboolean
gst_va_dmabuf_allocator_prepare_buffer (GstAllocator * allocator,
    GstBuffer * buffer)
{
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
  VASurfaceID surface;

  GST_VA_MEMORY_POOL_LOCK (&self->pool);
  surface = gst_va_dmabuf_allocator_prepare_buffer_unlocked (self, buffer);
  GST_VA_MEMORY_POOL_UNLOCK (&self->pool);

  return (surface != VA_INVALID_ID);
}

void
gst_va_dmabuf_allocator_flush (GstAllocator * allocator)
{
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);

  gst_va_memory_pool_flush (&self->pool, self->display);
}

static gboolean
gst_va_dmabuf_allocator_try (GstAllocator * allocator)
{
  GstBuffer *buffer;
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
  GstVideoInfo info = self->info;
  gboolean ret;

  buffer = gst_buffer_new ();
  ret = gst_va_dmabuf_allocator_setup_buffer_full (allocator, buffer, &info);
  gst_buffer_unref (buffer);

  if (ret)
    self->info = info;

  return ret;
}

gboolean
gst_va_dmabuf_allocator_set_format (GstAllocator * allocator,
    GstVideoInfo * info, guint usage_hint)
{
  GstVaDmabufAllocator *self;
  gboolean ret;

  g_return_val_if_fail (GST_IS_VA_DMABUF_ALLOCATOR (allocator), FALSE);
  g_return_val_if_fail (info, FALSE);

  self = GST_VA_DMABUF_ALLOCATOR (allocator);

  if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
    if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_INFO_FORMAT (&self->info)
        && GST_VIDEO_INFO_WIDTH (info) == GST_VIDEO_INFO_WIDTH (&self->info)
        && GST_VIDEO_INFO_HEIGHT (info) == GST_VIDEO_INFO_HEIGHT (&self->info)
        && usage_hint == self->usage_hint) {
      *info = self->info;       /* update caller's info (offset & stride) */
      return TRUE;
    }
    return FALSE;
  }

  self->usage_hint = usage_hint;
  self->info = *info;

  g_clear_pointer (&self->copy, gst_va_surface_copy_free);

  ret = gst_va_dmabuf_allocator_try (allocator);

  if (ret)
    *info = self->info;

  return ret;
}

gboolean
gst_va_dmabuf_allocator_get_format (GstAllocator * allocator,
    GstVideoInfo * info, guint * usage_hint)
{
  GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);

  if (GST_VIDEO_INFO_FORMAT (&self->info) == GST_VIDEO_FORMAT_UNKNOWN)
    return FALSE;

  if (info)
    *info = self->info;
  if (usage_hint)
    *usage_hint = self->usage_hint;

  return TRUE;
}

/* XXX: use a surface pool to control the created surfaces */
gboolean
gst_va_dmabuf_memories_setup (GstVaDisplay * display, GstVideoInfo * info,
    guint n_planes, GstMemory * mem[GST_VIDEO_MAX_PLANES],
    uintptr_t * fds, gsize offset[GST_VIDEO_MAX_PLANES], guint usage_hint)
{
  GstVideoFormat format;
  GstVaBufferSurface *buf;
  /* *INDENT-OFF* */
  VASurfaceAttribExternalBuffers ext_buf = {
    .width = GST_VIDEO_INFO_WIDTH (info),
    .height = GST_VIDEO_INFO_HEIGHT (info),
    .data_size = GST_VIDEO_INFO_SIZE (info),
    .num_planes = GST_VIDEO_INFO_N_PLANES (info),
    .buffers = fds,
    .num_buffers = GST_VIDEO_INFO_N_PLANES (info),
  };
  /* *INDENT-ON* */
  VASurfaceID surface;
  guint32 fourcc, rt_format;
  guint i;
  gboolean ret;

  g_return_val_if_fail (GST_IS_VA_DISPLAY (display), FALSE);
  g_return_val_if_fail (n_planes > 0
      && n_planes <= GST_VIDEO_MAX_PLANES, FALSE);

  format = GST_VIDEO_INFO_FORMAT (info);
  if (format == GST_VIDEO_FORMAT_UNKNOWN)
    return FALSE;

  rt_format = gst_va_chroma_from_video_format (format);
  if (rt_format == 0)
    return FALSE;

  fourcc = gst_va_fourcc_from_video_format (format);
  if (fourcc == 0)
    return FALSE;

  ext_buf.pixel_format = fourcc;

  for (i = 0; i < n_planes; i++) {
    ext_buf.pitches[i] = GST_VIDEO_INFO_PLANE_STRIDE (info, i);
    ext_buf.offsets[i] = offset[i];
  }

  ret = va_create_surfaces (display, rt_format, ext_buf.pixel_format,
      ext_buf.width, ext_buf.height, usage_hint, &ext_buf, &surface, 1);
  if (!ret)
    return FALSE;

  GST_LOG_OBJECT (display, "Created surface %#x [%dx%d]", surface,
      ext_buf.width, ext_buf.height);

  buf = gst_va_buffer_surface_new (surface, rt_format, ext_buf.width,
      ext_buf.height);
  buf->display = gst_object_ref (display);
  buf->n_mems = n_planes;
  memcpy (buf->mems, mem, sizeof (buf->mems));

  for (i = 0; i < n_planes; i++) {
    g_atomic_int_add (&buf->ref_count, 1);
    gst_mini_object_set_qdata (GST_MINI_OBJECT (mem[i]),
        gst_va_buffer_surface_quark (), buf, gst_va_buffer_surface_unref);
    GST_INFO_OBJECT (display, "setting surface %#x to dmabuf fd %d",
        buf->surface, gst_dmabuf_memory_get_fd (mem[i]));
  }

  return TRUE;
}

/*===================== GstVaAllocator / GstVaMemory =========================*/

struct _GstVaAllocator
{
  GstAllocator parent;

  GstVaDisplay *display;

  gboolean use_derived;
  GArray *surface_formats;

  GstVideoFormat surface_format;
  GstVideoFormat img_format;
  guint32 fourcc;
  guint32 rt_format;

  GstVideoInfo derived_info;
  GstVideoInfo info;
  guint usage_hint;

  GstVaSurfaceCopy *copy;

  GstVaMemoryPool pool;
};

typedef struct _GstVaMemory GstVaMemory;
struct _GstVaMemory
{
  GstMemory mem;

  VASurfaceID surface;
  GstVideoFormat surface_format;
  VAImage image;
  gpointer mapped_data;

  GstMapFlags prev_mapflags;
  gint map_count;

  gboolean is_derived;
  gboolean is_dirty;
  GMutex lock;
};
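
/* Mapping state of a GstVaMemory: @map_count counts nested maps and
 * @prev_mapflags remembers the flags of the first map, so later
 * nested maps with mismatched flags are rejected; @is_derived tells
 * whether @image was derived from the surface or created and fetched
 * with va_get_image (); @is_dirty marks a mapping opened for writing
 * whose content has to be uploaded back with va_put_image () on
 * unmap, unless the image is derived (see _va_map_unlocked () and
 * _va_unmap_unlocked () below). */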

G_DEFINE_TYPE_WITH_CODE (GstVaAllocator, gst_va_allocator, GST_TYPE_ALLOCATOR,
    _init_debug_category ());

static gboolean _va_unmap (GstVaMemory * mem);

static void
gst_va_allocator_finalize (GObject * object)
{
  GstVaAllocator *self = GST_VA_ALLOCATOR (object);

  g_clear_pointer (&self->copy, gst_va_surface_copy_free);
  gst_va_memory_pool_finalize (&self->pool);
  g_clear_pointer (&self->surface_formats, g_array_unref);
  gst_clear_object (&self->display);

  G_OBJECT_CLASS (gst_va_allocator_parent_class)->finalize (object);
}

static void
gst_va_allocator_dispose (GObject * object)
{
  GstVaAllocator *self = GST_VA_ALLOCATOR (object);

  gst_va_memory_pool_flush_unlocked (&self->pool, self->display);
  if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
    GST_WARNING_OBJECT (self, "Surfaces leaked: %d",
        gst_va_memory_pool_surface_count (&self->pool));
  }

  G_OBJECT_CLASS (gst_va_allocator_parent_class)->dispose (object);
}

static void
_va_free (GstAllocator * allocator, GstMemory * mem)
{
  GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
  GstVaMemory *va_mem = (GstVaMemory *) mem;

  if (va_mem->mapped_data) {
    g_warning (G_STRLOC ":%s: Freeing memory %p still mapped", G_STRFUNC,
        va_mem);
    _va_unmap (va_mem);
  }

  if (va_mem->surface != VA_INVALID_ID && mem->parent == NULL) {
    GST_LOG_OBJECT (self, "Destroying surface %#x", va_mem->surface);
    va_destroy_surfaces (self->display, &va_mem->surface, 1);
  }

  g_mutex_clear (&va_mem->lock);

  g_slice_free (GstVaMemory, va_mem);
}

static void
gst_va_allocator_class_init (GstVaAllocatorClass * klass)
{
  GstAllocatorClass *allocator_class = GST_ALLOCATOR_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->dispose = gst_va_allocator_dispose;
  object_class->finalize = gst_va_allocator_finalize;
  allocator_class->free = _va_free;
}

static inline void
_clean_mem (GstVaMemory * mem)
{
  memset (&mem->image, 0, sizeof (mem->image));
  mem->image.image_id = VA_INVALID_ID;
  mem->image.buf = VA_INVALID_ID;

  mem->is_derived = TRUE;
  mem->is_dirty = FALSE;
  mem->prev_mapflags = 0;
  mem->mapped_data = NULL;
}

static void
_reset_mem (GstVaMemory * mem, GstAllocator * allocator, gsize size)
{
  _clean_mem (mem);
  g_atomic_int_set (&mem->map_count, 0);
  g_mutex_init (&mem->lock);

  gst_memory_init (GST_MEMORY_CAST (mem), 0, allocator, NULL, size,
      0 /* align */ , 0 /* offset */ , size);
}

static inline void
_update_info (GstVideoInfo * info, const VAImage * image)
{
  guint i;

  for (i = 0; i < image->num_planes; i++) {
    GST_VIDEO_INFO_PLANE_OFFSET (info, i) = image->offsets[i];
    GST_VIDEO_INFO_PLANE_STRIDE (info, i) = image->pitches[i];
  }

  GST_VIDEO_INFO_SIZE (info) = image->data_size;
}

static inline gboolean
_update_image_info (GstVaAllocator * va_allocator)
{
  VASurfaceID surface;
  VAImage image = {.image_id = VA_INVALID_ID, };

  /* Create a test surface first */
  if (!va_create_surfaces (va_allocator->display, va_allocator->rt_format,
          va_allocator->fourcc, GST_VIDEO_INFO_WIDTH (&va_allocator->info),
          GST_VIDEO_INFO_HEIGHT (&va_allocator->info), va_allocator->usage_hint,
          NULL, &surface, 1)) {
    GST_ERROR_OBJECT (va_allocator, "Failed to create a test surface");
    return FALSE;
  }

  GST_DEBUG_OBJECT (va_allocator, "Created surface %#x [%dx%d]", surface,
      GST_VIDEO_INFO_WIDTH (&va_allocator->info),
      GST_VIDEO_INFO_HEIGHT (&va_allocator->info));

  /* Try derived first, but different formats can never derive */
  if (va_allocator->surface_format == va_allocator->img_format) {
    if (va_get_derive_image (va_allocator->display, surface, &image)) {
      va_allocator->use_derived = TRUE;
      va_allocator->derived_info = va_allocator->info;
      _update_info (&va_allocator->derived_info, &image);
      va_destroy_image (va_allocator->display, image.image_id);
    }
    image.image_id = VA_INVALID_ID;     /* reset it */
  }

  /* Then try to create an image. */
  if (!va_create_image (va_allocator->display, va_allocator->img_format,
          GST_VIDEO_INFO_WIDTH (&va_allocator->info),
          GST_VIDEO_INFO_HEIGHT (&va_allocator->info), &image)) {
    va_destroy_surfaces (va_allocator->display, &surface, 1);
    return FALSE;
  }

  _update_info (&va_allocator->info, &image);
  va_destroy_image (va_allocator->display, image.image_id);
  va_destroy_surfaces (va_allocator->display, &surface, 1);

  return TRUE;
}

static gpointer
_va_map_unlocked (GstVaMemory * mem, GstMapFlags flags)
{
  GstAllocator *allocator = GST_MEMORY_CAST (mem)->allocator;
  GstVideoInfo *info;
  GstVaAllocator *va_allocator;
  GstVaDisplay *display;
  gboolean use_derived;

  g_return_val_if_fail (mem->surface != VA_INVALID_ID, NULL);
  g_return_val_if_fail (GST_IS_VA_ALLOCATOR (allocator), NULL);

  if (g_atomic_int_get (&mem->map_count) > 0) {
    if (!(mem->prev_mapflags & flags) || !mem->mapped_data)
      return NULL;
    else
      goto success;
  }

  va_allocator = GST_VA_ALLOCATOR (allocator);
  display = va_allocator->display;

  if (flags & GST_MAP_WRITE) {
    mem->is_dirty = TRUE;
  } else {                      /* GST_MAP_READ only */
    mem->is_dirty = FALSE;
  }

  if (flags & GST_MAP_VA) {
    mem->mapped_data = &mem->surface;
    goto success;
  }

  switch (gst_va_display_get_implementation (display)) {
    case GST_VA_IMPLEMENTATION_INTEL_IHD:
      /* On Gen7+ Intel graphics the memory is mappable but not
       * cached, so normal memcpy() access is very slow to read, but
       * it's ok for writing. So let's assume that users won't prefer
       * direct-mapped memory if they request read access. */
      use_derived = va_allocator->use_derived && !(flags & GST_MAP_READ);
      break;
    case GST_VA_IMPLEMENTATION_INTEL_I965:
      /* YUV derived images are tiled, so writing them is also
       * problematic */
      use_derived = va_allocator->use_derived && !((flags & GST_MAP_READ)
          || ((flags & GST_MAP_WRITE)
              && GST_VIDEO_INFO_IS_YUV (&va_allocator->derived_info)));
      break;
    case GST_VA_IMPLEMENTATION_MESA_GALLIUM:
      /* Reading RGB derived images with non-standard resolutions
       * looks tiled too. TODO(victor): file a bug in Mesa. */
      use_derived = va_allocator->use_derived && !((flags & GST_MAP_READ)
          && GST_VIDEO_INFO_IS_RGB (&va_allocator->derived_info));
      break;
    default:
      use_derived = va_allocator->use_derived;
      break;
  }
  if (use_derived)
    info = &va_allocator->derived_info;
  else
    info = &va_allocator->info;

  if (!va_ensure_image (display, mem->surface, info, &mem->image, use_derived))
    return NULL;

  mem->is_derived = use_derived;

  if (!mem->is_derived) {
    if (!va_get_image (display, mem->surface, &mem->image))
      goto fail;
  }

  if (!va_map_buffer (display, mem->image.buf, &mem->mapped_data))
    goto fail;

success:
  {
    mem->prev_mapflags = flags;
    g_atomic_int_add (&mem->map_count, 1);
    return mem->mapped_data;
  }

fail:
  {
    va_destroy_image (display, mem->image.image_id);
    _clean_mem (mem);
    return NULL;
  }
}

static gpointer
_va_map (GstVaMemory * mem, gsize maxsize, GstMapFlags flags)
{
  gpointer data;

  g_mutex_lock (&mem->lock);
  data = _va_map_unlocked (mem, flags);
  g_mutex_unlock (&mem->lock);

  return data;
}

static gboolean
_va_unmap_unlocked (GstVaMemory * mem)
{
  GstAllocator *allocator = GST_MEMORY_CAST (mem)->allocator;
  GstVaDisplay *display;
  gboolean ret = TRUE;

  if (!g_atomic_int_dec_and_test (&mem->map_count))
    return TRUE;

  if (mem->prev_mapflags & GST_MAP_VA)
    goto bail;

  display = GST_VA_ALLOCATOR (allocator)->display;

  if (mem->image.image_id != VA_INVALID_ID) {
    if (mem->is_dirty && !mem->is_derived) {
      ret = va_put_image (display, mem->surface, &mem->image);
      mem->is_dirty = FALSE;
    }
    /* XXX(victor): if it's derived and dirty, create another surface
     * and replace it in mem */
  }

  ret &= va_unmap_buffer (display, mem->image.buf);
  ret &= va_destroy_image (display, mem->image.image_id);

bail:
  _clean_mem (mem);

  return ret;
}

static gboolean
_va_unmap (GstVaMemory * mem)
{
  gboolean ret;

  g_mutex_lock (&mem->lock);
  ret = _va_unmap_unlocked (mem);
  g_mutex_unlock (&mem->lock);

  return ret;
}

static GstMemory *
_va_share (GstMemory * mem, gssize offset, gssize size)
{
  GstVaMemory *vamem = (GstVaMemory *) mem;
  GstVaMemory *sub;
  GstMemory *parent;

  GST_DEBUG ("%p: share %" G_GSSIZE_FORMAT ", %" G_GSIZE_FORMAT, mem, offset,
      size);

  /* find real parent */
  if ((parent = vamem->mem.parent) == NULL)
    parent = (GstMemory *) vamem;

  if (size == -1)
    size = mem->maxsize - offset;

  sub = g_slice_new (GstVaMemory);

  /* the shared memory is always read-only */
  gst_memory_init (GST_MEMORY_CAST (sub), GST_MINI_OBJECT_FLAGS (parent) |
      GST_MINI_OBJECT_FLAG_LOCK_READONLY, vamem->mem.allocator, parent,
      vamem->mem.maxsize, vamem->mem.align, vamem->mem.offset + offset, size);

  sub->surface = vamem->surface;
  sub->surface_format = vamem->surface_format;

  _clean_mem (sub);

  g_atomic_int_set (&sub->map_count, 0);
  g_mutex_init (&sub->lock);

  return GST_MEMORY_CAST (sub);
}

/* XXX(victor): deep copy implementation. */
static GstMemory *
_va_copy (GstMemory * mem, gssize offset, gssize size)
{
  GstMemory *copy;
  GstMapInfo sinfo, dinfo;
  GstVaAllocator *va_allocator = GST_VA_ALLOCATOR (mem->allocator);
  GstVaMemory *va_copy, *va_mem = (GstVaMemory *) mem;
  gsize mem_size;

  GST_DEBUG ("%p: copy %" G_GSSIZE_FORMAT ", %" G_GSIZE_FORMAT, mem, offset,
      size);

  {
    GST_VA_MEMORY_POOL_LOCK (&va_allocator->pool);
    copy = gst_va_memory_pool_pop (&va_allocator->pool);
    GST_VA_MEMORY_POOL_UNLOCK (&va_allocator->pool);

    if (!copy) {
      copy = gst_va_allocator_alloc (mem->allocator);
      if (!copy) {
        GST_WARNING ("failed to allocate new memory");
        return NULL;
      }
    } else {
      gst_object_ref (mem->allocator);
    }
  }

  va_copy = (GstVaMemory *) copy;
  mem_size = gst_memory_get_sizes (mem, NULL, NULL);

  if (size == -1)
    size = mem_size > offset ? mem_size - offset : 0;

  if (offset == 0 && size == mem_size) {
    GstVaSurfaceCopy *copy_func;

    copy_func = _ensure_surface_copy (&va_allocator->copy,
        va_allocator->display, &va_allocator->info);
    if (copy_func
        && gst_va_surface_copy (copy_func, va_copy->surface, va_mem->surface))
      return copy;
  }

  if (!gst_memory_map (mem, &sinfo, GST_MAP_READ)) {
    GST_WARNING ("failed to map memory to copy");
    return NULL;
  }

  if (!gst_memory_map (copy, &dinfo, GST_MAP_WRITE)) {
    GST_WARNING ("could not write map memory %p", copy);
    gst_allocator_free (mem->allocator, copy);
    gst_memory_unmap (mem, &sinfo);
    return NULL;
  }

  memcpy (dinfo.data, sinfo.data + offset, size);
  gst_memory_unmap (copy, &dinfo);
  gst_memory_unmap (mem, &sinfo);

  return copy;
}

static void
gst_va_allocator_init (GstVaAllocator * self)
{
  GstAllocator *allocator = GST_ALLOCATOR (self);

  allocator->mem_type = GST_ALLOCATOR_VASURFACE;
  allocator->mem_map = (GstMemoryMapFunction) _va_map;
  allocator->mem_unmap = (GstMemoryUnmapFunction) _va_unmap;
  allocator->mem_share = _va_share;
  allocator->mem_copy = _va_copy;

  gst_va_memory_pool_init (&self->pool);

  GST_OBJECT_FLAG_SET (self, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
}

static gboolean
gst_va_memory_release (GstMiniObject * mini_object)
{
  GstMemory *mem = GST_MEMORY_CAST (mini_object);
  GstVaAllocator *self = GST_VA_ALLOCATOR (mem->allocator);

  GST_LOG ("releasing %p: surface %#x", mem, gst_va_memory_get_surface (mem));

  gst_va_memory_pool_push (&self->pool, mem);

  /* Keep last in case we are holding on the last allocator ref */
  gst_object_unref (mem->allocator);

  /* don't call mini_object's free */
  return FALSE;
}
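
/* Unlike the dmabuf variant, a GstVaMemory maps one-to-one to a VA
 * surface, so the dispose hook above can push the memory straight
 * back into the pool, from where gst_va_allocator_prepare_buffer ()
 * will recycle it. */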

GstMemory *
gst_va_allocator_alloc (GstAllocator * allocator)
{
  GstVaAllocator *self;
  GstVaMemory *mem;
  VASurfaceID surface;

  g_return_val_if_fail (GST_IS_VA_ALLOCATOR (allocator), NULL);

  self = GST_VA_ALLOCATOR (allocator);

  if (self->rt_format == 0) {
    GST_ERROR_OBJECT (self, "Unknown fourcc or chroma format");
    return NULL;
  }

  if (!va_create_surfaces (self->display, self->rt_format, self->fourcc,
          GST_VIDEO_INFO_WIDTH (&self->info),
          GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, NULL,
          &surface, 1))
    return NULL;

  mem = g_slice_new (GstVaMemory);

  mem->surface = surface;
  mem->surface_format = self->surface_format;

  _reset_mem (mem, allocator, GST_VIDEO_INFO_SIZE (&self->info));

  GST_MINI_OBJECT (mem)->dispose = gst_va_memory_release;
  gst_va_memory_pool_surface_inc (&self->pool);

  GST_LOG_OBJECT (self, "Created surface %#x [%dx%d]", mem->surface,
      GST_VIDEO_INFO_WIDTH (&self->info), GST_VIDEO_INFO_HEIGHT (&self->info));

  return GST_MEMORY_CAST (mem);
}

GstAllocator *
gst_va_allocator_new (GstVaDisplay * display, GArray * surface_formats)
{
  GstVaAllocator *self;

  g_return_val_if_fail (GST_IS_VA_DISPLAY (display), NULL);

  self = g_object_new (GST_TYPE_VA_ALLOCATOR, NULL);
  self->display = gst_object_ref (display);
  self->surface_formats = surface_formats;
  gst_object_ref_sink (self);

  return GST_ALLOCATOR (self);
}
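
/* Typical usage, as a rough sketch (illustrative, not part of this
 * file's contract); @surface_formats is the GArray of GstVideoFormat
 * the negotiated VA config can handle:
 *
 *   GstAllocator *allocator =
 *       gst_va_allocator_new (display, surface_formats);
 *   GstVideoInfo info;           // from the negotiated caps
 *
 *   if (gst_va_allocator_set_format (allocator, &info, usage_hint))
 *     gst_va_allocator_setup_buffer (allocator, buffer);
 */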

gboolean
gst_va_allocator_setup_buffer (GstAllocator * allocator, GstBuffer * buffer)
{
  GstMemory *mem = gst_va_allocator_alloc (allocator);
  if (!mem)
    return FALSE;

  gst_buffer_append_memory (buffer, mem);
  return TRUE;
}

static VASurfaceID
gst_va_allocator_prepare_buffer_unlocked (GstVaAllocator * self,
    GstBuffer * buffer)
{
  GstMemory *mem;
  VASurfaceID surface;

  mem = gst_va_memory_pool_pop (&self->pool);
  if (!mem)
    return VA_INVALID_ID;

  gst_object_ref (mem->allocator);
  surface = gst_va_memory_get_surface (mem);
  gst_buffer_append_memory (buffer, mem);

  GST_LOG ("buffer %p: memory %p - surface %#x", buffer, mem, surface);

  return surface;
}

gboolean
gst_va_allocator_prepare_buffer (GstAllocator * allocator, GstBuffer * buffer)
{
  GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
  VASurfaceID surface;

  GST_VA_MEMORY_POOL_LOCK (&self->pool);
  surface = gst_va_allocator_prepare_buffer_unlocked (self, buffer);
  GST_VA_MEMORY_POOL_UNLOCK (&self->pool);

  return (surface != VA_INVALID_ID);
}

void
gst_va_allocator_flush (GstAllocator * allocator)
{
  GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);

  gst_va_memory_pool_flush (&self->pool, self->display);
}

static gboolean
gst_va_allocator_try (GstAllocator * allocator)
{
  GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);

  self->fourcc = 0;
  self->rt_format = 0;
  self->use_derived = FALSE;
  self->img_format = GST_VIDEO_INFO_FORMAT (&self->info);

  self->surface_format =
      gst_va_video_surface_format_from_image_format (self->img_format,
      self->surface_formats);
  if (self->surface_format == GST_VIDEO_FORMAT_UNKNOWN) {
    /* try a surface without fourcc, using only the rt_format */
    self->fourcc = 0;
    self->rt_format = gst_va_chroma_from_video_format (self->img_format);
  } else {
    self->fourcc = gst_va_fourcc_from_video_format (self->surface_format);
    self->rt_format = gst_va_chroma_from_video_format (self->surface_format);
  }

  if (self->rt_format == 0) {
    GST_ERROR_OBJECT (allocator, "Unsupported image format: %s",
        gst_video_format_to_string (self->img_format));
    return FALSE;
  }

  if (!_update_image_info (self)) {
    GST_ERROR_OBJECT (allocator, "Failed to update allocator info");
    return FALSE;
  }

  GST_INFO_OBJECT (self,
      "va allocator info, surface format: %s, image format: %s, "
      "use derived: %s, rt format: 0x%x, fourcc: %" GST_FOURCC_FORMAT,
      (self->surface_format == GST_VIDEO_FORMAT_UNKNOWN) ? "unknown"
      : gst_video_format_to_string (self->surface_format),
      gst_video_format_to_string (self->img_format),
      self->use_derived ? "true" : "false", self->rt_format,
      GST_FOURCC_ARGS (self->fourcc));
  return TRUE;
}

gboolean
gst_va_allocator_set_format (GstAllocator * allocator, GstVideoInfo * info,
    guint usage_hint)
{
  GstVaAllocator *self;
  gboolean ret;

  g_return_val_if_fail (GST_IS_VA_ALLOCATOR (allocator), FALSE);
  g_return_val_if_fail (info, FALSE);

  self = GST_VA_ALLOCATOR (allocator);

  if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
    if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_INFO_FORMAT (&self->info)
        && GST_VIDEO_INFO_WIDTH (info) == GST_VIDEO_INFO_WIDTH (&self->info)
        && GST_VIDEO_INFO_HEIGHT (info) == GST_VIDEO_INFO_HEIGHT (&self->info)
        && usage_hint == self->usage_hint) {
      *info = self->info;       /* update caller's info (offset & stride) */
      return TRUE;
    }
    return FALSE;
  }

  self->usage_hint = usage_hint;
  self->info = *info;

  g_clear_pointer (&self->copy, gst_va_surface_copy_free);

  ret = gst_va_allocator_try (allocator);
  if (ret)
    *info = self->info;

  return ret;
}

gboolean
gst_va_allocator_get_format (GstAllocator * allocator, GstVideoInfo * info,
    guint * usage_hint)
{
  GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);

  if (GST_VIDEO_INFO_FORMAT (&self->info) == GST_VIDEO_FORMAT_UNKNOWN)
    return FALSE;

  if (info)
    *info = self->info;
  if (usage_hint)
    *usage_hint = self->usage_hint;

  return TRUE;
}

/*============ Utilities =====================================================*/

VASurfaceID
gst_va_memory_get_surface (GstMemory * mem)
{
  VASurfaceID surface = VA_INVALID_ID;

  if (!mem->allocator)
    return VA_INVALID_ID;

  if (GST_IS_DMABUF_ALLOCATOR (mem->allocator)) {
    GstVaBufferSurface *buf;

    buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
        gst_va_buffer_surface_quark ());
    if (buf)
      surface = buf->surface;
  } else if (GST_IS_VA_ALLOCATOR (mem->allocator)) {
    GstVaMemory *va_mem = (GstVaMemory *) mem;
    surface = va_mem->surface;
  }

  return surface;
}

VASurfaceID
gst_va_buffer_get_surface (GstBuffer * buffer)
{
  GstMemory *mem;

  mem = gst_buffer_peek_memory (buffer, 0);
  if (!mem)
    return VA_INVALID_ID;

  return gst_va_memory_get_surface (mem);
}

gboolean
gst_va_buffer_create_aux_surface (GstBuffer * buffer)
{
  GstMemory *mem;
  VASurfaceID surface = VA_INVALID_ID;
  GstVaDisplay *display = NULL;
  GstVideoFormat format;
  gint width, height;
  GstVaBufferSurface *surface_buffer;

  mem = gst_buffer_peek_memory (buffer, 0);
  if (!mem)
    return FALSE;

  /* Already created it. */
  surface_buffer = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
      gst_va_buffer_aux_surface_quark ());
  if (surface_buffer)
    return TRUE;

  if (!mem->allocator)
    return FALSE;

  if (GST_IS_VA_DMABUF_ALLOCATOR (mem->allocator)) {
    GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (mem->allocator);
    guint32 fourcc, rt_format;

    format = GST_VIDEO_INFO_FORMAT (&self->info);
    fourcc = gst_va_fourcc_from_video_format (format);
    rt_format = gst_va_chroma_from_video_format (format);
    if (fourcc == 0 || rt_format == 0) {
      GST_ERROR_OBJECT (self, "Unsupported format: %s",
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->info)));
      return FALSE;
    }

    display = self->display;
    width = GST_VIDEO_INFO_WIDTH (&self->info);
    height = GST_VIDEO_INFO_HEIGHT (&self->info);
    if (!va_create_surfaces (self->display, rt_format, fourcc,
            GST_VIDEO_INFO_WIDTH (&self->info),
            GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, NULL,
            &surface, 1))
      return FALSE;
  } else if (GST_IS_VA_ALLOCATOR (mem->allocator)) {
    GstVaAllocator *self = GST_VA_ALLOCATOR (mem->allocator);

    if (self->rt_format == 0) {
      GST_ERROR_OBJECT (self, "Unknown fourcc or chroma format");
      return FALSE;
    }

    display = self->display;
    width = GST_VIDEO_INFO_WIDTH (&self->info);
    height = GST_VIDEO_INFO_HEIGHT (&self->info);
    format = GST_VIDEO_INFO_FORMAT (&self->info);
    if (!va_create_surfaces (self->display, self->rt_format, self->fourcc,
            GST_VIDEO_INFO_WIDTH (&self->info),
            GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, NULL,
            &surface, 1))
      return FALSE;
  } else {
    g_assert_not_reached ();
  }

  if (!display || surface == VA_INVALID_ID)
    return FALSE;

  surface_buffer = gst_va_buffer_surface_new (surface, format, width, height);
  surface_buffer->display = gst_object_ref (display);
  g_atomic_int_add (&surface_buffer->ref_count, 1);

  gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
      gst_va_buffer_aux_surface_quark (), surface_buffer,
      gst_va_buffer_surface_unref);

  return TRUE;
}

VASurfaceID
gst_va_buffer_get_aux_surface (GstBuffer * buffer)
{
  GstVaBufferSurface *surface_buffer;
  GstMemory *mem;

  mem = gst_buffer_peek_memory (buffer, 0);
  if (!mem)
    return VA_INVALID_ID;

  surface_buffer = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
      gst_va_buffer_aux_surface_quark ());
  if (!surface_buffer)
    return VA_INVALID_ID;

  /* No one increments it, and its lifetime is the same as the
   * GstMemory itself */
  g_assert (g_atomic_int_get (&surface_buffer->ref_count) == 1);

  return surface_buffer->surface;
}