1 /*
2 * Initially based on gst-omx/omx/gstomxvideodec.c
3 *
4 * Copyright (C) 2011, Hewlett-Packard Development Company, L.P.
5 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
6 *
7 * Copyright (C) 2012, Collabora Ltd.
8 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
9 *
10  * Copyright (C) 2012, Rafaël Carré <funman@videolan.org>
11 *
12 * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
13 *
14 * Copyright (C) 2014-2015, Collabora Ltd.
15  * Author: Matthieu Bouron <matthieu.bouron@collabora.com>
16 *
17 * Copyright (C) 2015, Edward Hervey
18 * Author: Edward Hervey <bilboed@gmail.com>
19 *
20 * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
21 *
22 * This library is free software; you can redistribute it and/or
23 * modify it under the terms of the GNU Lesser General Public
24 * License as published by the Free Software Foundation
25 * version 2.1 of the License.
26 *
27 * This library is distributed in the hope that it will be useful,
28 * but WITHOUT ANY WARRANTY; without even the implied warranty of
29 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
30 * Lesser General Public License for more details.
31 *
32 * You should have received a copy of the GNU Lesser General Public
33 * License along with this library; if not, write to the Free Software
34 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
35 *
36 */
37
38 #ifdef HAVE_CONFIG_H
39 #include "config.h"
40 #endif
41
42 #include <gst/gst.h>
43 #include <gst/gl/gl.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideoaffinetransformationmeta.h>
46 #include <gst/video/gstvideopool.h>
47 #include <string.h>
48
49 #ifdef HAVE_ORC
50 #include <orc/orc.h>
51 #else
52 #define orc_memcpy memcpy
53 #endif
54
55 #include "gstamcvideodec.h"
56 #include "gstamc-constants.h"
57
58 GST_DEBUG_CATEGORY_STATIC (gst_amc_video_dec_debug_category);
59 #define GST_CAT_DEFAULT gst_amc_video_dec_debug_category
60
61 #define GST_VIDEO_DECODER_ERROR_FROM_ERROR(el, err) G_STMT_START { \
62 gchar *__dbg = g_strdup (err->message); \
63 GstVideoDecoder *__dec = GST_VIDEO_DECODER (el); \
64 GST_WARNING_OBJECT (el, "error: %s", __dbg); \
65 _gst_video_decoder_error (__dec, 1, \
66 err->domain, err->code, \
67 NULL, __dbg, __FILE__, GST_FUNCTION, __LINE__); \
68 g_clear_error (&err); \
69 } G_STMT_END
70
71 typedef struct _BufferIdentification BufferIdentification;
72 struct _BufferIdentification
73 {
74 guint64 timestamp;
75 };
76
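/* Refcounted state shared between a gl_sync and any copies made of it:
 * it records whether the AMC buffer has been released back to MediaCodec,
 * whether that release requested a render, and whether update_tex_image
 * has already been run for it. */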
77 struct gl_sync_result
78 {
79 gint refcount;
80 gint64 frame_available_ts;
81   gboolean updated;             /* only ever call update_tex_image once */
82   gboolean released;            /* only ever call release_output_buffer once */
83 gboolean rendered; /* whether the release resulted in a render */
84 };
85
86 static struct gl_sync_result *
87 _gl_sync_result_ref (struct gl_sync_result *result)
88 {
89 g_assert (result != NULL);
90
91 g_atomic_int_inc (&result->refcount);
92
93 GST_TRACE ("gl_sync result %p ref", result);
94
95 return result;
96 }
97
98 static void
99 _gl_sync_result_unref (struct gl_sync_result *result)
100 {
101 g_assert (result != NULL);
102
103 GST_TRACE ("gl_sync result %p unref", result);
104
105 if (g_atomic_int_dec_and_test (&result->refcount)) {
106 GST_TRACE ("freeing gl_sync result %p", result);
107 g_free (result);
108 }
109 }
110
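/* Per-output-buffer bookkeeping attached to each pushed GstBuffer via a
 * GstGLSyncMeta. Every decoded Surface frame gets one gl_sync, which is
 * queued on sink->gl_queue and processed in order on the GL thread. */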
111 struct gl_sync
112 {
113 gint refcount;
114 GstAmcVideoDec *sink; /* back reference for statistics, lock, cond, etc */
115 gint buffer_idx; /* idx of the AMC buffer we should render */
116 GstBuffer *buffer; /* back reference to the buffer */
117 GstGLMemory *oes_mem; /* where amc is rendering into. The same for every gl_sync */
118 GstAmcSurfaceTexture *surface; /* java wrapper for where amc is rendering into */
119 guint gl_frame_no; /* effectively the frame id */
120 gint64 released_ts; /* microseconds from g_get_monotonic_time() */
121 struct gl_sync_result *result;
122 };
123
124 static struct gl_sync *
125 _gl_sync_ref (struct gl_sync *sync)
126 {
127 g_assert (sync != NULL);
128
129 g_atomic_int_inc (&sync->refcount);
130
131 GST_TRACE ("gl_sync %p ref", sync);
132
133 return sync;
134 }
135
136 static void
137 _gl_sync_unref (struct gl_sync *sync)
138 {
139 g_assert (sync != NULL);
140
141 GST_TRACE ("gl_sync %p unref", sync);
142
143 if (g_atomic_int_dec_and_test (&sync->refcount)) {
144 GST_TRACE ("freeing gl_sync %p", sync);
145
146 _gl_sync_result_unref (sync->result);
147
148 g_object_unref (sync->sink);
149 g_object_unref (sync->surface);
150 gst_memory_unref ((GstMemory *) sync->oes_mem);
151
152 g_free (sync);
153 }
154 }
155
156 static gint
157 _queue_compare_gl_sync (gconstpointer a, gconstpointer b)
158 {
159 const struct gl_sync *sync = a;
160 guint frame = GPOINTER_TO_INT (b);
161
162 return sync->gl_frame_no - frame;
163 }
164
165 static GList *
166 _find_gl_sync_for_frame (GstAmcVideoDec * dec, guint frame)
167 {
168 return g_queue_find_custom (dec->gl_queue, GINT_TO_POINTER (frame),
169 (GCompareFunc) _queue_compare_gl_sync);
170 }
171
172 static void
173 _attach_mem_to_context (GstGLContext * context, GstAmcVideoDec * self)
174 {
175 GST_TRACE_OBJECT (self, "attaching texture %p id %u to current context",
176 self->surface, self->oes_mem->tex_id);
177 if (!gst_amc_surface_texture_attach_to_gl_context (self->surface,
178 self->oes_mem->tex_id, &self->gl_error)) {
179 GST_ERROR_OBJECT (self, "Failed to attach texture to the GL context");
180 GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
181 } else {
182 self->gl_mem_attached = TRUE;
183 }
184 }
185
186 static void
187 _dettach_mem_from_context (GstGLContext * context, GstAmcVideoDec * self)
188 {
189 if (self->surface) {
190 guint tex_id = self->oes_mem ? self->oes_mem->tex_id : 0;
191
192 GST_TRACE_OBJECT (self, "detaching texture %p id %u from current context",
193 self->surface, tex_id);
194
195 if (!gst_amc_surface_texture_detach_from_gl_context (self->surface,
196 &self->gl_error)) {
197       GST_ERROR_OBJECT (self, "Failed to detach texture from the GL context");
198 GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
199 }
200 }
201 self->gl_mem_attached = FALSE;
202 }
203
204 static BufferIdentification *
205 buffer_identification_new (GstClockTime timestamp)
206 {
207 BufferIdentification *id = g_slice_new (BufferIdentification);
208
209 id->timestamp = timestamp;
210
211 return id;
212 }
213
214 static void
215 buffer_identification_free (BufferIdentification * id)
216 {
217 g_slice_free (BufferIdentification, id);
218 }
219
220 /* prototypes */
221 static void gst_amc_video_dec_finalize (GObject * object);
222
223 static GstStateChangeReturn
224 gst_amc_video_dec_change_state (GstElement * element,
225 GstStateChange transition);
226 static void gst_amc_video_dec_set_context (GstElement * element,
227 GstContext * context);
228
229 static gboolean gst_amc_video_dec_open (GstVideoDecoder * decoder);
230 static gboolean gst_amc_video_dec_close (GstVideoDecoder * decoder);
231 static gboolean gst_amc_video_dec_start (GstVideoDecoder * decoder);
232 static gboolean gst_amc_video_dec_stop (GstVideoDecoder * decoder);
233 static gboolean gst_amc_video_dec_set_format (GstVideoDecoder * decoder,
234 GstVideoCodecState * state);
235 static gboolean gst_amc_video_dec_flush (GstVideoDecoder * decoder);
236 static GstFlowReturn gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
237 GstVideoCodecFrame * frame);
238 static GstFlowReturn gst_amc_video_dec_finish (GstVideoDecoder * decoder);
239 static gboolean gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec,
240 GstQuery * query);
241 static gboolean gst_amc_video_dec_src_query (GstVideoDecoder * bdec,
242 GstQuery * query);
243
244 static GstFlowReturn gst_amc_video_dec_drain (GstAmcVideoDec * self);
245 static gboolean gst_amc_video_dec_check_codec_config (GstAmcVideoDec * self);
246 static void
247 gst_amc_video_dec_on_frame_available (GstAmcSurfaceTexture * texture,
248 gpointer user_data);
249
250 enum
251 {
252 PROP_0
253 };
254
255 /* class initialization */
256
257 static void gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass);
258 static void gst_amc_video_dec_init (GstAmcVideoDec * self);
259 static void gst_amc_video_dec_base_init (gpointer g_class);
260
261 static GstVideoDecoderClass *parent_class = NULL;
262
263 GType
264 gst_amc_video_dec_get_type (void)
265 {
266 static gsize type = 0;
267
268 if (g_once_init_enter (&type)) {
269 GType _type;
270 static const GTypeInfo info = {
271 sizeof (GstAmcVideoDecClass),
272 gst_amc_video_dec_base_init,
273 NULL,
274 (GClassInitFunc) gst_amc_video_dec_class_init,
275 NULL,
276 NULL,
277 sizeof (GstAmcVideoDec),
278 0,
279 (GInstanceInitFunc) gst_amc_video_dec_init,
280 NULL
281 };
282
283 _type = g_type_register_static (GST_TYPE_VIDEO_DECODER, "GstAmcVideoDec",
284 &info, 0);
285
286 GST_DEBUG_CATEGORY_INIT (gst_amc_video_dec_debug_category, "amcvideodec", 0,
287 "Android MediaCodec video decoder");
288
289 g_once_init_leave (&type, _type);
290 }
291 return type;
292 }
293
294 static const gchar *
295 caps_to_mime (GstCaps * caps)
296 {
297 GstStructure *s;
298 const gchar *name;
299
300 s = gst_caps_get_structure (caps, 0);
301 if (!s)
302 return NULL;
303
304 name = gst_structure_get_name (s);
305
306 if (strcmp (name, "video/mpeg") == 0) {
307 gint mpegversion;
308
309 if (!gst_structure_get_int (s, "mpegversion", &mpegversion))
310 return NULL;
311
312 if (mpegversion == 4)
313 return "video/mp4v-es";
314 else if (mpegversion == 1 || mpegversion == 2)
315 return "video/mpeg2";
316 } else if (strcmp (name, "video/x-h263") == 0) {
317 return "video/3gpp";
318 } else if (strcmp (name, "video/x-h264") == 0) {
319 return "video/avc";
320 } else if (strcmp (name, "video/x-h265") == 0) {
321 return "video/hevc";
322 } else if (strcmp (name, "video/x-vp8") == 0) {
323 return "video/x-vnd.on2.vp8";
324 } else if (strcmp (name, "video/x-vp9") == 0) {
325 return "video/x-vnd.on2.vp9";
326 } else if (strcmp (name, "video/x-divx") == 0) {
327 return "video/mp4v-es";
328 }
329
330 return NULL;
331 }
332
333 static void
334 gst_amc_video_dec_base_init (gpointer g_class)
335 {
336 GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
337 GstAmcVideoDecClass *amcvideodec_class = GST_AMC_VIDEO_DEC_CLASS (g_class);
338 const GstAmcCodecInfo *codec_info;
339 GstPadTemplate *templ;
340 GstCaps *sink_caps, *src_caps, *all_src_caps;
341 gchar *longname;
342
343 codec_info =
344 g_type_get_qdata (G_TYPE_FROM_CLASS (g_class), gst_amc_codec_info_quark);
345 /* This happens for the base class and abstract subclasses */
346 if (!codec_info)
347 return;
348
349 amcvideodec_class->codec_info = codec_info;
350
351 gst_amc_codec_info_to_caps (codec_info, &sink_caps, &src_caps);
352
353 all_src_caps =
354 gst_caps_from_string ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY
355 "), format = (string) RGBA, texture-target = (string) external-oes");
356
357 if (codec_info->gl_output_only) {
358 gst_caps_unref (src_caps);
359 } else {
360 gst_caps_append (all_src_caps, src_caps);
361 }
362
363 /* Add pad templates */
364 templ =
365 gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, sink_caps);
366 gst_element_class_add_pad_template (element_class, templ);
367 gst_caps_unref (sink_caps);
368
369 templ =
370 gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, all_src_caps);
371 gst_element_class_add_pad_template (element_class, templ);
372 gst_caps_unref (all_src_caps);
373
374 longname = g_strdup_printf ("Android MediaCodec %s", codec_info->name);
375 gst_element_class_set_metadata (element_class,
376 codec_info->name,
377 "Codec/Decoder/Video/Hardware",
378 longname, "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
379 g_free (longname);
380 }
381
382 static void
383 gst_amc_video_dec_class_init (GstAmcVideoDecClass * klass)
384 {
385 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
386 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
387 GstVideoDecoderClass *videodec_class = GST_VIDEO_DECODER_CLASS (klass);
388
389 parent_class = g_type_class_peek_parent (klass);
390
391 gobject_class->finalize = gst_amc_video_dec_finalize;
392
393 element_class->change_state =
394 GST_DEBUG_FUNCPTR (gst_amc_video_dec_change_state);
395 element_class->set_context =
396 GST_DEBUG_FUNCPTR (gst_amc_video_dec_set_context);
397
398 videodec_class->start = GST_DEBUG_FUNCPTR (gst_amc_video_dec_start);
399 videodec_class->stop = GST_DEBUG_FUNCPTR (gst_amc_video_dec_stop);
400 videodec_class->open = GST_DEBUG_FUNCPTR (gst_amc_video_dec_open);
401 videodec_class->close = GST_DEBUG_FUNCPTR (gst_amc_video_dec_close);
402 videodec_class->flush = GST_DEBUG_FUNCPTR (gst_amc_video_dec_flush);
403 videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_amc_video_dec_set_format);
404 videodec_class->handle_frame =
405 GST_DEBUG_FUNCPTR (gst_amc_video_dec_handle_frame);
406 videodec_class->finish = GST_DEBUG_FUNCPTR (gst_amc_video_dec_finish);
407 videodec_class->decide_allocation =
408 GST_DEBUG_FUNCPTR (gst_amc_video_dec_decide_allocation);
409 videodec_class->src_query = GST_DEBUG_FUNCPTR (gst_amc_video_dec_src_query);
410 }
411
412 static void
413 gst_amc_video_dec_init (GstAmcVideoDec * self)
414 {
415 gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
416 gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (self), TRUE);
417
418 g_mutex_init (&self->drain_lock);
419 g_cond_init (&self->drain_cond);
420
421 g_mutex_init (&self->gl_lock);
422 g_cond_init (&self->gl_cond);
423
424 self->gl_queue = g_queue_new ();
425 }
426
427 static gboolean
428 gst_amc_video_dec_open (GstVideoDecoder * decoder)
429 {
430 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder);
431 GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
432 GError *err = NULL;
433
434 GST_DEBUG_OBJECT (self, "Opening decoder");
435
436 self->codec = gst_amc_codec_new (klass->codec_info->name, FALSE, &err);
437 if (!self->codec) {
438 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
439 return FALSE;
440 }
441 self->codec_config = AMC_CODEC_CONFIG_NONE;
442
443 self->started = FALSE;
444 self->flushing = TRUE;
445
446 GST_DEBUG_OBJECT (self, "Opened decoder");
447
448 return TRUE;
449 }
450
451 static gboolean
452 gst_amc_video_dec_close (GstVideoDecoder * decoder)
453 {
454 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (decoder);
455
456 GST_DEBUG_OBJECT (self, "Closing decoder");
457
458 if (self->downstream_supports_gl
459 && self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
460 g_mutex_lock (&self->gl_lock);
461 GST_INFO_OBJECT (self, "shutting down gl queue pushed %u ready %u "
462 "released %u", self->gl_pushed_frame_count, self->gl_ready_frame_count,
463 self->gl_released_frame_count);
464
465 g_queue_free_full (self->gl_queue, (GDestroyNotify) _gl_sync_unref);
466 self->gl_queue = g_queue_new ();
467 g_mutex_unlock (&self->gl_lock);
468
469 if (self->gl_mem_attached)
470 gst_gl_context_thread_add (self->gl_context,
471 (GstGLContextThreadFunc) _dettach_mem_from_context, self);
472 }
473 self->gl_pushed_frame_count = 0;
474 self->gl_ready_frame_count = 0;
475 self->gl_released_frame_count = 0;
476 self->gl_last_rendered_frame = 0;
477
478 if (self->surface) {
479 GError *err = NULL;
480
481 if (!gst_amc_surface_texture_set_on_frame_available_callback (self->surface,
482 NULL, NULL, &err)) {
483 GST_ERROR_OBJECT (self,
484 "Failed to unset back pointer on the listener. "
485 "crashes/hangs may ensue: %s", err ? err->message : "Unknown");
486 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
487 }
488
489 gst_object_unref (self->surface);
490 self->surface = NULL;
491 }
492
493 if (self->codec) {
494 GError *err = NULL;
495
496 gst_amc_codec_release (self->codec, &err);
497 if (err)
498 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
499
500 gst_amc_codec_free (self->codec);
501 }
502
503 self->started = FALSE;
504 self->flushing = TRUE;
505 self->downstream_supports_gl = FALSE;
506
507 self->codec = NULL;
508 self->codec_config = AMC_CODEC_CONFIG_NONE;
509
510 GST_DEBUG_OBJECT (self, "Freeing GL context: %" GST_PTR_FORMAT,
511 self->gl_context);
512 if (self->gl_context) {
513 gst_object_unref (self->gl_context);
514 self->gl_context = NULL;
515 }
516
517 if (self->oes_mem) {
518 gst_memory_unref ((GstMemory *) self->oes_mem);
519 self->oes_mem = NULL;
520 }
521
522 if (self->gl_display) {
523 gst_object_unref (self->gl_display);
524 self->gl_display = NULL;
525 }
526
527 if (self->other_gl_context) {
528 gst_object_unref (self->other_gl_context);
529 self->other_gl_context = NULL;
530 }
531
532 GST_DEBUG_OBJECT (self, "Closed decoder");
533
534 return TRUE;
535 }
536
537 static void
538 gst_amc_video_dec_finalize (GObject * object)
539 {
540 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (object);
541
542 g_mutex_clear (&self->drain_lock);
543 g_cond_clear (&self->drain_cond);
544
545 g_mutex_clear (&self->gl_lock);
546 g_cond_clear (&self->gl_cond);
547
548 if (self->gl_queue) {
549 g_queue_free_full (self->gl_queue, (GDestroyNotify) _gl_sync_unref);
550 self->gl_queue = NULL;
551 }
552
553 G_OBJECT_CLASS (parent_class)->finalize (object);
554 }
555
556 static void
557 gst_amc_video_dec_set_context (GstElement * element, GstContext * context)
558 {
559 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (element);
560
561 gst_gl_handle_set_context (element, context, &self->gl_display,
562 &self->other_gl_context);
563
564 GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
565 }
566
567 static GstStateChangeReturn
568 gst_amc_video_dec_change_state (GstElement * element, GstStateChange transition)
569 {
570 GstAmcVideoDec *self;
571 GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
572 GError *err = NULL;
573
574 g_return_val_if_fail (GST_IS_AMC_VIDEO_DEC (element),
575 GST_STATE_CHANGE_FAILURE);
576 self = GST_AMC_VIDEO_DEC (element);
577
578 GST_DEBUG_OBJECT (element, "changing state: %s => %s",
579 gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
580 gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
581
582 switch (transition) {
583 case GST_STATE_CHANGE_NULL_TO_READY:
584 break;
585 case GST_STATE_CHANGE_READY_TO_PAUSED:
586 self->downstream_flow_ret = GST_FLOW_OK;
587 self->draining = FALSE;
588 self->started = FALSE;
589 break;
590 case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
591 break;
592 case GST_STATE_CHANGE_PAUSED_TO_READY:
593 self->flushing = TRUE;
594 if (self->started) {
595 gst_amc_codec_flush (self->codec, &err);
596 if (err)
597 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
598 }
599 g_mutex_lock (&self->drain_lock);
600 self->draining = FALSE;
601 g_cond_broadcast (&self->drain_cond);
602 g_mutex_unlock (&self->drain_lock);
603 break;
604 default:
605 break;
606 }
607
608 if (ret == GST_STATE_CHANGE_FAILURE)
609 return ret;
610
611 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
612
613 if (ret == GST_STATE_CHANGE_FAILURE)
614 return ret;
615
616 switch (transition) {
617 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
618 break;
619 case GST_STATE_CHANGE_PAUSED_TO_READY:
620 self->downstream_flow_ret = GST_FLOW_FLUSHING;
621 self->started = FALSE;
622 break;
623 default:
624 break;
625 }
626
627 return ret;
628 }
629
630 #define MAX_FRAME_DIST_TIME (5 * GST_SECOND)
631 #define MAX_FRAME_DIST_FRAMES (100)
632
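/* Find the pending GstVideoCodecFrame whose queued timestamp is closest to
 * the timestamp reported by MediaCodec. Frames queued much earlier than the
 * match (see the MAX_FRAME_DIST_* limits above) are dropped as stale. */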
633 static GstVideoCodecFrame *
634 _find_nearest_frame (GstAmcVideoDec * self, GstClockTime reference_timestamp)
635 {
636 GList *l, *best_l = NULL;
637 GList *finish_frames = NULL;
638 GstVideoCodecFrame *best = NULL;
639 guint64 best_timestamp = 0;
640 guint64 best_diff = G_MAXUINT64;
641 BufferIdentification *best_id = NULL;
642 GList *frames;
643
644 frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self));
645
646 for (l = frames; l; l = l->next) {
647 GstVideoCodecFrame *tmp = l->data;
648 BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
649 guint64 timestamp, diff;
650
651 /* This happens for frames that were just added but
652 * which were not passed to the component yet. Ignore
653 * them here!
654 */
655 if (!id)
656 continue;
657
658 timestamp = id->timestamp;
659
660 if (timestamp > reference_timestamp)
661 diff = timestamp - reference_timestamp;
662 else
663 diff = reference_timestamp - timestamp;
664
665 if (best == NULL || diff < best_diff) {
666 best = tmp;
667 best_timestamp = timestamp;
668 best_diff = diff;
669 best_l = l;
670 best_id = id;
671
672 /* For frames without timestamp we simply take the first frame */
673 if ((reference_timestamp == 0 && !GST_CLOCK_TIME_IS_VALID (timestamp))
674 || diff == 0)
675 break;
676 }
677 }
678
679 if (best_id) {
680 for (l = frames; l && l != best_l; l = l->next) {
681 GstVideoCodecFrame *tmp = l->data;
682 BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
683 guint64 diff_time, diff_frames;
684
685 if (id->timestamp > best_timestamp)
686 break;
687
688 if (id->timestamp == 0 || best_timestamp == 0)
689 diff_time = 0;
690 else
691 diff_time = best_timestamp - id->timestamp;
692 diff_frames = best->system_frame_number - tmp->system_frame_number;
693
694 if (diff_time > MAX_FRAME_DIST_TIME
695 || diff_frames > MAX_FRAME_DIST_FRAMES) {
696 finish_frames =
697 g_list_prepend (finish_frames, gst_video_codec_frame_ref (tmp));
698 }
699 }
700 }
701
702 if (finish_frames) {
703 g_warning ("%s: Too old frames, bug in decoder -- please file a bug",
704 GST_ELEMENT_NAME (self));
705 for (l = finish_frames; l; l = l->next) {
706 gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), l->data);
707 }
708 }
709
710 if (best)
711 gst_video_codec_frame_ref (best);
712
713 g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
714 g_list_free (frames);
715
716 return best;
717 }
718
719 static gboolean
720 gst_amc_video_dec_check_codec_config (GstAmcVideoDec * self)
721 {
722 gboolean ret = (self->codec_config == AMC_CODEC_CONFIG_NONE
723 || (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE
724 && self->downstream_supports_gl)
725 || (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE
726 && !self->downstream_supports_gl));
727
728 if (!ret) {
729 GST_ERROR_OBJECT
730 (self,
731 "Codec configuration (%d) is not compatible with downstream which %s support GL output",
732 self->codec_config, self->downstream_supports_gl ? "does" : "does not");
733 }
734
735 return ret;
736 }
737
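/* Configure the output state from the MediaCodec output format: RGBA with
 * the external-oes texture target when decoding into a Surface, otherwise
 * the raw video format described by color-format/stride/slice-height. */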
738 static gboolean
739 gst_amc_video_dec_set_src_caps (GstAmcVideoDec * self, GstAmcFormat * format)
740 {
741 GstVideoCodecState *output_state;
742 const gchar *mime;
743 gint color_format, width, height;
744 gint stride, slice_height;
745 gint crop_left, crop_right;
746 gint crop_top, crop_bottom;
747 GstVideoFormat gst_format;
748 GstAmcVideoDecClass *klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
749 GError *err = NULL;
750 gboolean ret;
751
752 if (!gst_amc_format_get_int (format, "color-format", &color_format, &err) ||
753 !gst_amc_format_get_int (format, "width", &width, &err) ||
754 !gst_amc_format_get_int (format, "height", &height, &err)) {
755 GST_ERROR_OBJECT (self, "Failed to get output format metadata: %s",
756 err->message);
757 g_clear_error (&err);
758 return FALSE;
759 }
760
761 if (gst_amc_format_get_int (format, "crop-left", &crop_left, NULL) &&
762 gst_amc_format_get_int (format, "crop-right", &crop_right, NULL)) {
763 width = crop_right + 1 - crop_left;
764 }
765
766 if (gst_amc_format_get_int (format, "crop-top", &crop_top, NULL) &&
767 gst_amc_format_get_int (format, "crop-bottom", &crop_bottom, NULL)) {
768 height = crop_bottom + 1 - crop_top;
769 }
770
771 if (width == 0 || height == 0) {
772 GST_ERROR_OBJECT (self, "Height or width not set");
773 return FALSE;
774 }
775
776 mime = caps_to_mime (self->input_state->caps);
777 if (!mime) {
778 GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
779 return FALSE;
780 }
781
782 if (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
783 gst_format = GST_VIDEO_FORMAT_RGBA;
784 } else {
785 gst_format =
786 gst_amc_color_format_to_video_format (klass->codec_info, mime,
787 color_format);
788 }
789
790 if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
791 GST_ERROR_OBJECT (self, "Unknown color format 0x%08x", color_format);
792 return FALSE;
793 }
794
795 output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self),
796 gst_format, width, height, self->input_state);
797
798 /* FIXME: Special handling for multiview, untested */
799 if (color_format == COLOR_QCOM_FormatYVU420SemiPlanar32mMultiView) {
800 gst_video_multiview_video_info_change_mode (&output_state->info,
801 GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM, GST_VIDEO_MULTIVIEW_FLAGS_NONE);
802 }
803
804 memset (&self->color_format_info, 0, sizeof (self->color_format_info));
805 if (self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
806 if (output_state->caps)
807 gst_caps_unref (output_state->caps);
808 output_state->caps = gst_video_info_to_caps (&output_state->info);
809 gst_caps_set_features (output_state->caps, 0,
810 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
811 gst_caps_set_simple (output_state->caps, "texture-target", G_TYPE_STRING,
812 "external-oes", NULL);
813 GST_DEBUG_OBJECT (self, "Configuring for Surface output");
814
815 /* The width/height values are used in other places for
816 * checking if the resolution changed. Set everything
817 * that makes sense here
818 */
819 self->color_format_info.color_format = COLOR_FormatAndroidOpaque;
820 self->color_format_info.width = width;
821 self->color_format_info.height = height;
822 self->color_format_info.crop_left = crop_left;
823 self->color_format_info.crop_right = crop_right;
824 self->color_format_info.crop_top = crop_top;
825 self->color_format_info.crop_bottom = crop_bottom;
826
827 goto out;
828 }
829
830 if (!gst_amc_format_get_int (format, "stride", &stride, &err) ||
831 !gst_amc_format_get_int (format, "slice-height", &slice_height, &err)) {
832 GST_ERROR_OBJECT (self, "Failed to get stride and slice-height: %s",
833 err->message);
834 g_clear_error (&err);
835 return FALSE;
836 }
837
838 self->format = gst_format;
839 self->width = width;
840 self->height = height;
841 if (!gst_amc_color_format_info_set (&self->color_format_info,
842 klass->codec_info, mime, color_format, width, height, stride,
843 slice_height, crop_left, crop_right, crop_top, crop_bottom)) {
844 GST_ERROR_OBJECT (self, "Failed to set up GstAmcColorFormatInfo");
845 return FALSE;
846 }
847
848 GST_DEBUG_OBJECT (self,
849 "Color format info: {color_format=%d (0x%08x), width=%d, height=%d, "
850 "stride=%d, slice-height=%d, crop-left=%d, crop-top=%d, "
851 "crop-right=%d, crop-bottom=%d, frame-size=%d}",
852 self->color_format_info.color_format,
853 self->color_format_info.color_format, self->color_format_info.width,
854 self->color_format_info.height, self->color_format_info.stride,
855 self->color_format_info.slice_height, self->color_format_info.crop_left,
856 self->color_format_info.crop_top, self->color_format_info.crop_right,
857 self->color_format_info.crop_bottom, self->color_format_info.frame_size);
858
859 out:
860 ret = gst_video_decoder_negotiate (GST_VIDEO_DECODER (self));
861
862 gst_video_codec_state_unref (output_state);
863 self->input_state_changed = FALSE;
864
865 return ret;
866 }
867
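/* Copy a system-memory MediaCodec output buffer into outbuf, converting
 * between the codec's color format layout and the negotiated GstVideoInfo.
 * Not used for Surface (opaque) output. */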
868 static gboolean
869 gst_amc_video_dec_fill_buffer (GstAmcVideoDec * self, GstAmcBuffer * buf,
870 const GstAmcBufferInfo * buffer_info, GstBuffer * outbuf)
871 {
872 GstVideoCodecState *state =
873 gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
874 GstVideoInfo *info = &state->info;
875 gboolean ret = FALSE;
876
877 if (self->color_format_info.color_format == COLOR_FormatAndroidOpaque)
878 return FALSE;
879
880 ret =
881 gst_amc_color_format_copy (&self->color_format_info, buf, buffer_info,
882 info, outbuf, COLOR_FORMAT_COPY_OUT);
883
884 gst_video_codec_state_unref (state);
885 return ret;
886 }
887
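/* Affine matrix that flips the Y axis; it is applied to the buffer's affine
 * transformation meta so that Surface output is presented upright. */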
888 static const gfloat yflip_matrix[16] = {
889 1.0f, 0.0f, 0.0f, 0.0f,
890 0.0f, -1.0f, 0.0f, 0.0f,
891 0.0f, 0.0f, 1.0f, 0.0f,
892 0.0f, 1.0f, 0.0f, 1.0f
893 };
894
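/* Nothing to do when the sync point is set; all synchronisation happens in
 * the wait/copy/free hooks below. */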
895 static void
896 _amc_gl_set_sync (GstGLSyncMeta * sync_meta, GstGLContext * context)
897 {
898 }
899
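/* Release the AMC output buffer back to MediaCodec (optionally rendering it
 * into the SurfaceTexture), exactly once per gl_sync. Also compensates for
 * 'on_frame_available' callbacks that the platform may have coalesced by
 * advancing the ready-frame counter. Called with the GL lock held. */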
900 static void
901 _gl_sync_release_buffer (struct gl_sync *sync, gboolean render)
902 {
903 GError *error = NULL;
904
905 if (!sync->result->released) {
906 sync->released_ts = g_get_monotonic_time ();
907
908 if ((gint) (sync->sink->gl_released_frame_count -
909 sync->sink->gl_ready_frame_count) > 0) {
910 guint diff =
911 sync->sink->gl_released_frame_count -
912 sync->sink->gl_ready_frame_count - 1u;
913 sync->sink->gl_ready_frame_count += diff;
914 GST_LOG ("gl_sync %p possible \'on_frame_available\' listener miss "
915 "detected, attempting to work around. Jumping forward %u "
916 "frames for frame %u", sync, diff, sync->gl_frame_no);
917 }
918
919 GST_TRACE ("gl_sync %p release_output_buffer idx %u frame %u render %s",
920 sync, sync->buffer_idx, sync->gl_frame_no, render ? "TRUE" : "FALSE");
921
922 /* Release the frame into the surface */
923 sync->sink->gl_released_frame_count++;
924 if (!render) {
925 /* Advance the ready counter ourselves if we aren't going to render
926 * and therefore receive a listener callback */
927 sync->sink->gl_ready_frame_count++;
928 }
929
930 if (!gst_amc_codec_release_output_buffer (sync->sink->codec,
931 sync->buffer_idx, render, &error)) {
932 GST_ERROR_OBJECT (sync->sink,
933 "gl_sync %p Failed to render buffer, index %d frame %u", sync,
934 sync->buffer_idx, sync->gl_frame_no);
935 goto out;
936 }
937 sync->result->released = TRUE;
938 sync->result->rendered = render;
939 }
940
941 out:
942 if (error) {
943 if (sync->sink->gl_error == NULL)
944 sync->sink->gl_error = error;
945 else
946 g_clear_error (&error);
947 }
948 }
949
950 static void
951 _gl_sync_release_next_buffer (struct gl_sync *sync, gboolean render)
952 {
953 GList *l;
954
955 if ((l = _find_gl_sync_for_frame (sync->sink, sync->gl_frame_no + 1))) {
956 struct gl_sync *next = l->data;
957
958 _gl_sync_release_buffer (next, render);
959 } else {
960 GST_TRACE ("gl_sync %p no next frame available", sync);
961 }
962 }
963
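/* Invert a 4x4 matrix using the cofactor/adjugate method; returns 0 if the
 * matrix is singular. Used to turn the SurfaceTexture texture-coordinate
 * transform into the geometry transform expected by the affine meta. */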
964 #define I(x,y) ((y)*4+(x))
965 static int
966 affine_inverse (float in[], float out[])
967 {
968 float s0, s1, s2, s3, s4, s5;
969 float c0, c1, c2, c3, c4, c5;
970 float det, invdet;
971
972 s0 = in[0] * in[I (1, 1)] - in[I (1, 0)] * in[I (0, 1)];
973 s1 = in[0] * in[I (1, 2)] - in[I (1, 0)] * in[I (0, 2)];
974 s2 = in[0] * in[I (1, 3)] - in[I (1, 0)] * in[I (0, 3)];
975 s3 = in[1] * in[I (1, 2)] - in[I (1, 1)] * in[I (0, 2)];
976 s4 = in[1] * in[I (1, 3)] - in[I (1, 1)] * in[I (0, 3)];
977 s5 = in[2] * in[I (1, 3)] - in[I (1, 2)] * in[I (0, 3)];
978
979 c0 = in[I (2, 0)] * in[I (3, 1)] - in[I (3, 0)] * in[I (2, 1)];
980 c1 = in[I (2, 0)] * in[I (3, 2)] - in[I (3, 0)] * in[I (2, 2)];
981 c2 = in[I (2, 0)] * in[I (3, 3)] - in[I (3, 0)] * in[I (2, 3)];
982 c3 = in[I (2, 1)] * in[I (3, 2)] - in[I (3, 1)] * in[I (2, 2)];
983 c4 = in[I (2, 1)] * in[I (3, 3)] - in[I (3, 1)] * in[I (2, 3)];
984 c5 = in[I (2, 2)] * in[I (3, 3)] - in[I (3, 2)] * in[I (2, 3)];
985
986 det = s0 * c5 - s1 * c4 + s2 * c3 + s3 * c2 - s4 * c1 + s5 * c0;
987 if (det == 0.0)
988 return 0;
989 invdet = 1.0 / det;
990
991 out[I (0, 0)] =
992 (in[I (1, 1)] * c5 - in[I (1, 2)] * c4 + in[I (1, 3)] * c3) * invdet;
993 out[I (0, 1)] =
994 (-in[I (0, 1)] * c5 + in[I (0, 2)] * c4 - in[I (0, 3)] * c3) * invdet;
995 out[I (0, 2)] =
996 (in[I (3, 1)] * s5 - in[I (3, 2)] * s4 + in[I (3, 3)] * s3) * invdet;
997 out[I (0, 3)] =
998 (-in[I (2, 1)] * s5 + in[I (2, 2)] * s4 - in[I (2, 3)] * s3) * invdet;
999
1000 out[I (1, 0)] =
1001 (-in[I (1, 0)] * c5 + in[I (1, 2)] * c2 - in[I (1, 3)] * c1) * invdet;
1002 out[I (1, 1)] =
1003 (in[I (0, 0)] * c5 - in[I (0, 2)] * c2 + in[I (0, 3)] * c1) * invdet;
1004 out[I (1, 2)] =
1005 (-in[I (3, 0)] * s5 + in[I (3, 2)] * s2 - in[I (3, 3)] * s1) * invdet;
1006 out[I (1, 3)] =
1007 (in[I (2, 0)] * s5 - in[I (2, 2)] * s2 + in[I (2, 3)] * s1) * invdet;
1008
1009 out[I (2, 0)] =
1010 (in[I (1, 0)] * c4 - in[I (1, 1)] * c2 + in[I (1, 3)] * c0) * invdet;
1011 out[I (2, 1)] =
1012 (-in[I (0, 0)] * c4 + in[I (0, 1)] * c2 - in[I (0, 3)] * c0) * invdet;
1013 out[I (2, 2)] =
1014 (in[I (3, 0)] * s4 - in[I (3, 1)] * s2 + in[I (3, 3)] * s0) * invdet;
1015 out[I (2, 3)] =
1016 (-in[I (2, 0)] * s4 + in[I (2, 1)] * s2 - in[I (2, 3)] * s0) * invdet;
1017
1018 out[I (3, 0)] =
1019 (-in[I (1, 0)] * c3 + in[I (1, 1)] * c1 - in[I (1, 2)] * c0) * invdet;
1020 out[I (3, 1)] =
1021 (in[I (0, 0)] * c3 - in[I (0, 1)] * c1 + in[I (0, 2)] * c0) * invdet;
1022 out[I (3, 2)] =
1023 (-in[I (3, 0)] * s3 + in[I (3, 1)] * s1 - in[I (3, 2)] * s0) * invdet;
1024 out[I (3, 3)] =
1025 (in[I (2, 0)] * s3 - in[I (2, 1)] * s1 + in[I (2, 2)] * s0) * invdet;
1026
1027 return 1;
1028 }
1029
1030 #undef I
1031
1032 /* caller should remove from the gl_queue after calling this function.
1033 * _gl_sync_release_buffer must be called before this function */
1034 static void
1035 _gl_sync_render_unlocked (struct gl_sync *sync)
1036 {
1037 GstVideoAffineTransformationMeta *af_meta;
1038 GError *error = NULL;
1039 gfloat matrix[16];
1040 gint64 ts = 0;
1041
1042 GST_TRACE ("gl_sync %p result %p render (updated:%u)", sync, sync->result,
1043 sync->result->updated);
1044
1045 if (sync->result->updated || !sync->result->rendered)
1046 return;
1047
1048 /* FIXME: if this ever starts returning valid values we should attempt
1049 * to use it */
1050 if (!gst_amc_surface_texture_get_timestamp (sync->surface, &ts, &error)) {
1051     GST_ERROR_OBJECT (sync->sink, "Failed to get SurfaceTexture timestamp");
1052 GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1053 goto out;
1054 }
1055 GST_TRACE ("gl_sync %p rendering timestamp before update %" G_GINT64_FORMAT,
1056 sync, ts);
1057
1058 GST_TRACE ("gl_sync %p update_tex_image", sync);
1059 if (!gst_amc_surface_texture_update_tex_image (sync->surface, &error)) {
1060 GST_ERROR_OBJECT (sync->sink, "Failed to update texture image");
1061 GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1062 goto out;
1063 }
1064 GST_TRACE ("gl_sync result %p updated", sync->result);
1065 sync->result->updated = TRUE;
1066 sync->sink->gl_last_rendered_frame = sync->gl_frame_no;
1067
1068 if (!gst_amc_surface_texture_get_timestamp (sync->surface, &ts, &error)) {
1069     GST_ERROR_OBJECT (sync->sink, "Failed to get SurfaceTexture timestamp");
1070 GST_ELEMENT_ERROR_FROM_ERROR (sync->sink, error);
1071 goto out;
1072 }
1073 GST_TRACE ("gl_sync %p rendering timestamp after update %" G_GINT64_FORMAT,
1074 sync, ts);
1075
1076 af_meta = gst_buffer_get_video_affine_transformation_meta (sync->buffer);
1077 if (!af_meta) {
1078 GST_WARNING ("Failed to retrieve the transformation meta from the "
1079 "gl_sync %p buffer %p", sync, sync->buffer);
1080 } else if (gst_amc_surface_texture_get_transform_matrix (sync->surface,
1081 matrix, &error)) {
1082 gfloat inv_mat[16];
1083
1084 /* The transform from mediacodec applies to the texture coords, but
1085 * GStreamer affine meta applies to the video geometry, which is the
1086 * opposite - so we invert it */
1087 if (affine_inverse (matrix, inv_mat)) {
1088 gst_video_affine_transformation_meta_apply_matrix (af_meta, inv_mat);
1089 } else {
1090 GST_WARNING
1091 ("Failed to invert display transform - the video won't display right. "
1092 "Transform matrix [ %f %f %f %f, %f %f %f %f, %f %f %f %f, %f %f %f %f ]",
1093 matrix[0], matrix[1], matrix[2], matrix[3], matrix[4], matrix[5],
1094 matrix[6], matrix[7], matrix[8], matrix[9], matrix[10], matrix[11],
1095 matrix[12], matrix[13], matrix[14], matrix[15]);
1096 }
1097 gst_video_affine_transformation_meta_apply_matrix (af_meta, yflip_matrix);
1098 }
1099
1100 GST_LOG ("gl_sync %p successfully updated SurfaceTexture %p into "
1101 "OES texture %u", sync, sync->surface, sync->oes_mem->tex_id);
1102
1103 out:
1104 if (error) {
1105 if (sync->sink->gl_error == NULL)
1106 sync->sink->gl_error = error;
1107 else
1108 g_clear_error (&error);
1109 }
1110
1111 _gl_sync_release_next_buffer (sync, TRUE);
1112 }
1113
1114 static gboolean
1115 _amc_gl_possibly_wait_for_gl_sync (struct gl_sync *sync, gint64 end_time)
1116 {
1117 GST_TRACE ("gl_sync %p waiting for frame %u current %u updated %u ", sync,
1118 sync->gl_frame_no, sync->sink->gl_ready_frame_count,
1119 sync->result->updated);
1120
1121 if ((gint) (sync->sink->gl_last_rendered_frame - sync->gl_frame_no) > 0) {
1122 GST_ERROR ("gl_sync %p unsuccessfully waited for frame %u. out of order "
1123 "wait detected", sync, sync->gl_frame_no);
1124 return FALSE;
1125 }
1126
1127   /* The number of frame callbacks (gl_ready_frame_count) does not correspond
1128    * directly to the number of pushed buffers (gl_pushed_frame_count) or even
1129    * to the number of released buffers (gl_released_frame_count) since, from
1130    * the frameworks/native/include/gui/ConsumerBase.h file,
1131 *
1132 * "...frames that are queued while in asynchronous mode only trigger the
1133 * callback if no previous frames are pending."
1134 *
1135 * As a result, we need to advance the ready counter somehow ourselves when
1136 * such events happen. There is no reliable way of knowing when/if the frame
1137    * listener is going to fire. The only unique identifier,
1138    * SurfaceTexture::get_timestamp, seems to always return 0.
1139 *
1140 * The maximum queue size as defined in
1141 * frameworks/native/include/gui/BufferQueue.h
1142 * is 32 of which a maximum of 30 can be acquired at a time so we picked a
1143 * number less than that to wait for before updating the ready frame count.
1144 */
1145
1146 while (!sync->result->updated
1147 && (gint) (sync->sink->gl_ready_frame_count - sync->gl_frame_no) < 0) {
1148     /* The time limit is needed, otherwise when amc decides not to emit the
1149      * frame listener (say, on orientation changes) we don't wait forever */
1150 if (end_time == -1 || !g_cond_wait_until (&sync->sink->gl_cond,
1151 &sync->sink->gl_lock, end_time)) {
1152 GST_LOG ("gl_sync %p unsuccessfully waited for frame %u", sync,
1153 sync->gl_frame_no);
1154 return FALSE;
1155 }
1156 }
1157 GST_LOG ("gl_sync %p successfully waited for frame %u", sync,
1158 sync->gl_frame_no);
1159
1160 return TRUE;
1161 }
1162
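/* Walk the queue of pending gl_syncs up to and including the one attached to
 * sync_meta: release each buffer to the surface, optionally wait for its
 * 'frame available' notification, update the texture image and pop it from
 * the queue. Must be called on the GL thread with the GL lock held. */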
1163 static gboolean
1164 _amc_gl_iterate_queue_unlocked (GstGLSyncMeta * sync_meta, gboolean wait)
1165 {
1166 struct gl_sync *sync = sync_meta->data;
1167 struct gl_sync *tmp;
1168 gboolean ret = TRUE;
1169 gint64 end_time;
1170
1171 while ((tmp = g_queue_peek_head (sync->sink->gl_queue))) {
1172 /* skip frames that are ahead of the current wait frame */
1173 if ((gint) (sync->gl_frame_no - tmp->gl_frame_no) < 0) {
1174 GST_TRACE ("gl_sync %p frame %u is ahead of gl_sync %p frame %u", tmp,
1175 tmp->gl_frame_no, sync, sync->gl_frame_no);
1176 break;
1177 }
1178
1179 _gl_sync_release_buffer (tmp, wait);
1180
1181 /* Frames are currently pushed in order and waits need to be performed
1182 * in the same order */
1183
1184 end_time = wait ? 30 * G_TIME_SPAN_MILLISECOND + tmp->released_ts : -1;
1185 if (!_amc_gl_possibly_wait_for_gl_sync (tmp, end_time))
1186 ret = FALSE;
1187
1188 _gl_sync_render_unlocked (tmp);
1189
1190 g_queue_pop_head (tmp->sink->gl_queue);
1191 _gl_sync_unref (tmp);
1192 }
1193
1194 return ret;
1195 }
1196
1197 struct gl_wait
1198 {
1199 GstGLSyncMeta *sync_meta;
1200 gboolean ret;
1201 };
1202
1203 static void
1204 _amc_gl_wait_gl (GstGLContext * context, struct gl_wait *wait)
1205 {
1206 struct gl_sync *sync = wait->sync_meta->data;
1207
1208 g_mutex_lock (&sync->sink->gl_lock);
1209 wait->ret = _amc_gl_iterate_queue_unlocked (wait->sync_meta, TRUE);
1210 g_mutex_unlock (&sync->sink->gl_lock);
1211 }
1212
1213 static void
1214 _amc_gl_wait (GstGLSyncMeta * sync_meta, GstGLContext * context)
1215 {
1216 struct gl_sync *sync = sync_meta->data;
1217 struct gl_wait wait;
1218
1219 wait.sync_meta = sync_meta;
1220 wait.ret = FALSE;
1221 gst_gl_context_thread_add (context,
1222 (GstGLContextThreadFunc) _amc_gl_wait_gl, &wait);
1223
1224 if (!wait.ret)
1225 GST_WARNING ("gl_sync %p could not wait for frame, took too long", sync);
1226 }
1227
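/* Invoked when a buffer carrying the sync meta is copied: the new gl_sync
 * references the same OES memory, surface and shared result so either copy
 * can perform (or observe) the pending update. */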
1228 static void
1229 _amc_gl_copy (GstGLSyncMeta * src, GstBuffer * sbuffer, GstGLSyncMeta * dest,
1230 GstBuffer * dbuffer)
1231 {
1232 struct gl_sync *sync = src->data;
1233 struct gl_sync *tmp;
1234
1235 tmp = g_new0 (struct gl_sync, 1);
1236
1237 GST_TRACE ("copying gl_sync %p to %p", sync, tmp);
1238
1239 g_mutex_lock (&sync->sink->gl_lock);
1240
1241 tmp->refcount = 1;
1242 tmp->sink = gst_object_ref (sync->sink);
1243 tmp->buffer = dbuffer;
1244 tmp->oes_mem = (GstGLMemory *) gst_memory_ref ((GstMemory *) sync->oes_mem);
1245 tmp->surface = g_object_ref (sync->surface);
1246 tmp->gl_frame_no = sync->gl_frame_no;
1247 tmp->released_ts = sync->released_ts;
1248 tmp->result = sync->result;
1249 _gl_sync_result_ref (tmp->result);
1250 dest->data = tmp;
1251
1252 g_mutex_unlock (&sync->sink->gl_lock);
1253 }
1254
1255 static void
1256 _amc_gl_render_on_free (GstGLContext * context, GstGLSyncMeta * sync_meta)
1257 {
1258 struct gl_sync *sync = sync_meta->data;
1259
1260 g_mutex_lock (&sync->sink->gl_lock);
1261 /* just render as many frames as we have */
1262 _amc_gl_iterate_queue_unlocked (sync_meta, FALSE);
1263 g_mutex_unlock (&sync->sink->gl_lock);
1264 }
1265
1266 static void
1267 _amc_gl_free (GstGLSyncMeta * sync_meta, GstGLContext * context)
1268 {
1269 struct gl_sync *sync = sync_meta->data;
1270
1271 /* The wait render queue inside android is not very deep so when we drop
1272 * frames we need to signal that we have rendered them if we have any chance
1273 * of keeping up between the decoder, the android GL queue and downstream
1274 * OpenGL. If we don't do this, once we start dropping frames downstream,
1275 * it is very near to impossible for the pipeline to catch up. */
1276 gst_gl_context_thread_add (context,
1277 (GstGLContextThreadFunc) _amc_gl_render_on_free, sync_meta);
1278 _gl_sync_unref (sync);
1279 }
1280
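/* Source pad task: dequeue one output buffer from MediaCodec, match it to a
 * pending codec frame and push it downstream, either as an external-oes GL
 * buffer (Surface mode) or by copying into a system-memory buffer. */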
1281 static void
1282 gst_amc_video_dec_loop (GstAmcVideoDec * self)
1283 {
1284 GstVideoCodecFrame *frame;
1285 GstFlowReturn flow_ret = GST_FLOW_OK;
1286 GstClockTimeDiff deadline;
1287 gboolean is_eos;
1288 GstAmcBuffer *buf;
1289 GstAmcBufferInfo buffer_info;
1290 gint idx;
1291 GError *err = NULL;
1292 gboolean release_buffer = TRUE;
1293
1294 GST_VIDEO_DECODER_STREAM_LOCK (self);
1295
1296 retry:
1297 /*if (self->input_state_changed) {
1298 idx = INFO_OUTPUT_FORMAT_CHANGED;
1299 } else { */
1300 GST_DEBUG_OBJECT (self, "Waiting for available output buffer");
1301 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1302 /* Wait at most 100ms here, some codecs don't fail dequeueing if
1303 * the codec is flushing, causing deadlocks during shutdown */
1304 idx =
1305 gst_amc_codec_dequeue_output_buffer (self->codec, &buffer_info, 100000,
1306 &err);
1307 GST_VIDEO_DECODER_STREAM_LOCK (self);
1308 /*} */
1309
1310 GST_DEBUG_OBJECT (self, "dequeueOutputBuffer() returned %d (0x%x)", idx, idx);
1311
1312 if (idx < 0) {
1313 if (self->flushing) {
1314 g_clear_error (&err);
1315 goto flushing;
1316 }
1317
1318 switch (idx) {
1319 case INFO_OUTPUT_BUFFERS_CHANGED:
1320 /* Handled internally */
1321 g_assert_not_reached ();
1322 break;
1323 case INFO_OUTPUT_FORMAT_CHANGED:{
1324 GstAmcFormat *format;
1325 gchar *format_string;
1326
1327 GST_DEBUG_OBJECT (self, "Output format has changed");
1328
1329 format = gst_amc_codec_get_output_format (self->codec, &err);
1330 if (!format)
1331 goto format_error;
1332
1333 format_string = gst_amc_format_to_string (format, &err);
1334         if (!format_string) {
1335 gst_amc_format_free (format);
1336 goto format_error;
1337 }
1338 GST_DEBUG_OBJECT (self, "Got new output format: %s", format_string);
1339 g_free (format_string);
1340
1341 if (!gst_amc_video_dec_set_src_caps (self, format)) {
1342 gst_amc_format_free (format);
1343 goto format_error;
1344 }
1345 gst_amc_format_free (format);
1346
1347 goto retry;
1348 }
1349 case INFO_TRY_AGAIN_LATER:
1350 GST_DEBUG_OBJECT (self, "Dequeueing output buffer timed out");
1351 goto retry;
1352 case G_MININT:
1353 GST_ERROR_OBJECT (self, "Failure dequeueing output buffer");
1354 goto dequeue_error;
1355 default:
1356 g_assert_not_reached ();
1357 break;
1358 }
1359
1360 goto retry;
1361 }
1362
1363 GST_DEBUG_OBJECT (self,
1364 "Got output buffer at index %d: offset %d size %d time %" G_GINT64_FORMAT
1365 " flags 0x%08x", idx, buffer_info.offset, buffer_info.size,
1366 buffer_info.presentation_time_us, buffer_info.flags);
1367
1368 buf = gst_amc_codec_get_output_buffer (self->codec, idx, &err);
1369 if (err) {
1370 if (self->flushing) {
1371 g_clear_error (&err);
1372 goto flushing;
1373 }
1374 goto failed_to_get_output_buffer;
1375 }
1376
1377 if (self->codec_config != AMC_CODEC_CONFIG_WITH_SURFACE && !buf)
1378 goto got_null_output_buffer;
1379
1380 frame =
1381 _find_nearest_frame (self,
1382 gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1));
1383
1384 is_eos = ! !(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);
1385
1386 if (frame
1387 && (deadline =
1388 gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
1389 frame)) < 0) {
1390 GST_WARNING_OBJECT (self,
1391 "Frame is too late, dropping (deadline %" GST_STIME_FORMAT ")",
1392 GST_STIME_ARGS (deadline));
1393 flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1394 } else if (frame && self->codec_config == AMC_CODEC_CONFIG_WITH_SURFACE) {
1395 GstBuffer *outbuf;
1396 GstGLSyncMeta *sync_meta;
1397 GstVideoCodecState *state;
1398 struct gl_sync *sync;
1399 gboolean first_buffer = FALSE;
1400
1401 g_mutex_lock (&self->gl_lock);
1402 if (self->gl_error) {
1403 GST_ELEMENT_ERROR_FROM_ERROR (self, self->gl_error);
1404 g_mutex_unlock (&self->gl_lock);
1405 goto gl_output_error;
1406 }
1407 g_mutex_unlock (&self->gl_lock);
1408
1409 outbuf = gst_buffer_new ();
1410
1411 state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
1412
1413 if (!self->oes_mem) {
1414 GstGLBaseMemoryAllocator *base_mem_alloc;
1415 GstGLVideoAllocationParams *params;
1416
1417 base_mem_alloc =
1418 GST_GL_BASE_MEMORY_ALLOCATOR (gst_allocator_find
1419 (GST_GL_MEMORY_ALLOCATOR_NAME));
1420
1421 params = gst_gl_video_allocation_params_new (self->gl_context, NULL,
1422 &state->info, 0, NULL, GST_GL_TEXTURE_TARGET_EXTERNAL_OES,
1423 GST_GL_RGBA);
1424
1425 self->oes_mem = (GstGLMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
1426 (GstGLAllocationParams *) params);
1427 gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
1428 gst_object_unref (base_mem_alloc);
1429
1430 gst_gl_context_thread_add (self->gl_context,
1431 (GstGLContextThreadFunc) _attach_mem_to_context, self);
1432
1433 first_buffer = TRUE;
1434 }
1435
1436 gst_video_codec_state_unref (state);
1437
1438 gst_buffer_append_memory (outbuf,
1439 gst_memory_ref ((GstMemory *) self->oes_mem));
1440
1441 sync = g_new0 (struct gl_sync, 1);
1442 sync->refcount = 1;
1443 sync->sink = g_object_ref (self);
1444 sync->buffer = outbuf;
1445 sync->surface = g_object_ref (self->surface);
1446 sync->oes_mem =
1447 (GstGLMemory *) gst_memory_ref ((GstMemory *) self->oes_mem);
1448 sync->buffer_idx = idx;
1449 sync->result = g_new0 (struct gl_sync_result, 1);
1450 sync->result->refcount = 1;
1451 sync->result->updated = FALSE;
1452
1453 GST_TRACE ("new gl_sync %p result %p", sync, sync->result);
1454
1455 sync_meta = gst_buffer_add_gl_sync_meta_full (self->gl_context, outbuf,
1456 sync);
1457 sync_meta->set_sync = _amc_gl_set_sync;
1458 sync_meta->wait = _amc_gl_wait;
1459 sync_meta->wait_cpu = _amc_gl_wait;
1460 sync_meta->copy = _amc_gl_copy;
1461 sync_meta->free = _amc_gl_free;
1462
1463 /* The meta needs to be created now:
1464 * Later (in _gl_sync_render_unlocked) the buffer will be locked.
1465 */
1466 gst_buffer_add_video_affine_transformation_meta (outbuf);
1467
1468 g_mutex_lock (&self->gl_lock);
1469
1470 self->gl_pushed_frame_count++;
1471 sync->gl_frame_no = self->gl_pushed_frame_count;
1472 g_queue_push_tail (self->gl_queue, _gl_sync_ref (sync));
1473
1474 if (first_buffer) {
1475 _gl_sync_release_buffer (sync, TRUE);
1476 if (self->gl_error) {
1477 gst_buffer_unref (outbuf);
1478 g_mutex_unlock (&self->gl_lock);
1479 goto gl_output_error;
1480 }
1481 }
1482 g_mutex_unlock (&self->gl_lock);
1483
1484 GST_DEBUG_OBJECT (self, "push GL frame %u", sync->gl_frame_no);
1485 frame->output_buffer = outbuf;
1486 flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
1487
1488 release_buffer = FALSE;
1489 } else if (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE && !frame
1490 && buffer_info.size > 0) {
1491 GstBuffer *outbuf;
1492
1493 /* This sometimes happens at EOS or if the input is not properly framed,
1494 * let's handle it gracefully by allocating a new buffer for the current
1495 * caps and filling it
1496 */
1497 GST_ERROR_OBJECT (self, "No corresponding frame found");
1498
1499 outbuf =
1500 gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
1501
1502 if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info, outbuf)) {
1503 gst_buffer_unref (outbuf);
1504 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1505 GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1506 idx);
1507 if (err && !self->flushing)
1508 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1509 g_clear_error (&err);
1510 gst_amc_buffer_free (buf);
1511 buf = NULL;
1512 goto invalid_buffer;
1513 }
1514
1515 GST_BUFFER_PTS (outbuf) =
1516 gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND,
1517 1);
1518 flow_ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
1519 } else if (self->codec_config == AMC_CODEC_CONFIG_WITHOUT_SURFACE && frame
1520 && buffer_info.size > 0) {
1521 if ((flow_ret =
1522 gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (self),
1523 frame)) != GST_FLOW_OK) {
1524 GST_ERROR_OBJECT (self, "Failed to allocate buffer");
1525 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1526 GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1527 idx);
1528 if (err && !self->flushing)
1529 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1530 g_clear_error (&err);
1531 gst_amc_buffer_free (buf);
1532 buf = NULL;
1533 goto flow_error;
1534 }
1535
1536 if (!gst_amc_video_dec_fill_buffer (self, buf, &buffer_info,
1537 frame->output_buffer)) {
1538 gst_buffer_replace (&frame->output_buffer, NULL);
1539 gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1540 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err))
1541 GST_ERROR_OBJECT (self, "Failed to release output buffer index %d",
1542 idx);
1543 if (err && !self->flushing)
1544 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1545 g_clear_error (&err);
1546 gst_amc_buffer_free (buf);
1547 buf = NULL;
1548 goto invalid_buffer;
1549 }
1550
1551 flow_ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
1552 } else if (frame != NULL) {
1553 flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
1554 }
1555
1556 if (buf) {
1557 gst_amc_buffer_free (buf);
1558 buf = NULL;
1559 }
1560
1561 if (release_buffer) {
1562 if (!gst_amc_codec_release_output_buffer (self->codec, idx, FALSE, &err)) {
1563 if (self->flushing) {
1564 g_clear_error (&err);
1565 goto flushing;
1566 }
1567 goto failed_release;
1568 }
1569 }
1570
1571 if (is_eos || flow_ret == GST_FLOW_EOS) {
1572 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1573 g_mutex_lock (&self->drain_lock);
1574 if (self->draining) {
1575 GST_DEBUG_OBJECT (self, "Drained");
1576 self->draining = FALSE;
1577 g_cond_broadcast (&self->drain_cond);
1578 } else if (flow_ret == GST_FLOW_OK) {
1579 GST_DEBUG_OBJECT (self, "Component signalled EOS");
1580 flow_ret = GST_FLOW_EOS;
1581 }
1582 g_mutex_unlock (&self->drain_lock);
1583 GST_VIDEO_DECODER_STREAM_LOCK (self);
1584 } else {
1585 GST_DEBUG_OBJECT (self, "Finished frame: %s", gst_flow_get_name (flow_ret));
1586 }
1587
1588 self->downstream_flow_ret = flow_ret;
1589
1590 if (flow_ret != GST_FLOW_OK)
1591 goto flow_error;
1592
1593 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1594
1595 return;
1596
1597 dequeue_error:
1598 {
1599 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1600 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1601 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1602 self->downstream_flow_ret = GST_FLOW_ERROR;
1603 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1604 g_mutex_lock (&self->drain_lock);
1605 self->draining = FALSE;
1606 g_cond_broadcast (&self->drain_cond);
1607 g_mutex_unlock (&self->drain_lock);
1608 return;
1609 }
1610
1611 format_error:
1612 {
1613 if (err)
1614 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1615 else
1616 GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
1617 ("Failed to handle format"));
1618 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1619 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1620 self->downstream_flow_ret = GST_FLOW_ERROR;
1621 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1622 g_mutex_lock (&self->drain_lock);
1623 self->draining = FALSE;
1624 g_cond_broadcast (&self->drain_cond);
1625 g_mutex_unlock (&self->drain_lock);
1626 return;
1627 }
1628 failed_release:
1629 {
1630 GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
1631 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1632 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1633 self->downstream_flow_ret = GST_FLOW_ERROR;
1634 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1635 g_mutex_lock (&self->drain_lock);
1636 self->draining = FALSE;
1637 g_cond_broadcast (&self->drain_cond);
1638 g_mutex_unlock (&self->drain_lock);
1639 return;
1640 }
1641 flushing:
1642 {
1643 GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
1644 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1645 self->downstream_flow_ret = GST_FLOW_FLUSHING;
1646 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1647 return;
1648 }
1649
1650 flow_error:
1651 {
1652 if (flow_ret == GST_FLOW_EOS) {
1653 GST_DEBUG_OBJECT (self, "EOS");
1654 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
1655 gst_event_new_eos ());
1656 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1657 } else if (flow_ret < GST_FLOW_EOS) {
1658 GST_ELEMENT_FLOW_ERROR (self, flow_ret);
1659 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
1660 gst_event_new_eos ());
1661 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1662 } else if (flow_ret == GST_FLOW_FLUSHING) {
1663 GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
1664 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1665 }
1666 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1667 g_mutex_lock (&self->drain_lock);
1668 self->draining = FALSE;
1669 g_cond_broadcast (&self->drain_cond);
1670 g_mutex_unlock (&self->drain_lock);
1671 return;
1672 }
1673
1674 failed_to_get_output_buffer:
1675 {
1676 GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
1677 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1678 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1679 self->downstream_flow_ret = GST_FLOW_ERROR;
1680 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1681 g_mutex_lock (&self->drain_lock);
1682 self->draining = FALSE;
1683 g_cond_broadcast (&self->drain_cond);
1684 g_mutex_unlock (&self->drain_lock);
1685 return;
1686 }
1687
1688 got_null_output_buffer:
1689 {
1690 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
1691 ("Got no output buffer"));
1692 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1693 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1694 self->downstream_flow_ret = GST_FLOW_ERROR;
1695 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1696 g_mutex_lock (&self->drain_lock);
1697 self->draining = FALSE;
1698 g_cond_broadcast (&self->drain_cond);
1699 g_mutex_unlock (&self->drain_lock);
1700 return;
1701 }
1702
1703 invalid_buffer:
1704 {
1705 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
1706 ("Invalid sized input buffer"));
1707 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1708 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1709 self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
1710 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1711 g_mutex_lock (&self->drain_lock);
1712 self->draining = FALSE;
1713 g_cond_broadcast (&self->drain_cond);
1714 g_mutex_unlock (&self->drain_lock);
1715 return;
1716 }
1717 gl_output_error:
1718 {
1719 if (buf) {
1720 gst_amc_buffer_free (buf);
1721 buf = NULL;
1722 }
1723 gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
1724 gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
1725 self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
1726 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1727 g_mutex_lock (&self->drain_lock);
1728 self->draining = FALSE;
1729 g_cond_broadcast (&self->drain_cond);
1730 g_mutex_unlock (&self->drain_lock);
1731 return;
1732 }
1733 }
1734
1735 static gboolean
1736 gst_amc_video_dec_start (GstVideoDecoder * decoder)
1737 {
1738 GstAmcVideoDec *self;
1739
1740 self = GST_AMC_VIDEO_DEC (decoder);
1741 self->last_upstream_ts = 0;
1742 self->drained = TRUE;
1743 self->downstream_flow_ret = GST_FLOW_OK;
1744 self->started = FALSE;
1745 self->flushing = TRUE;
1746
1747 return TRUE;
1748 }
1749
1750 static gboolean
1751 gst_amc_video_dec_stop (GstVideoDecoder * decoder)
1752 {
1753 GstAmcVideoDec *self;
1754 GError *err = NULL;
1755
1756 self = GST_AMC_VIDEO_DEC (decoder);
1757 GST_DEBUG_OBJECT (self, "Stopping decoder");
1758 self->flushing = TRUE;
1759 if (self->started) {
1760 gst_amc_codec_flush (self->codec, &err);
1761 if (err)
1762 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1763 gst_amc_codec_stop (self->codec, &err);
1764 if (err)
1765 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1766 self->started = FALSE;
1767 }
1768 gst_pad_stop_task (GST_VIDEO_DECODER_SRC_PAD (decoder));
1769
1770 self->downstream_flow_ret = GST_FLOW_FLUSHING;
1771 self->drained = TRUE;
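/* Wake up anything still blocked in _drain() before tearing down */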
1772 g_mutex_lock (&self->drain_lock);
1773 self->draining = FALSE;
1774 g_cond_broadcast (&self->drain_cond);
1775 g_mutex_unlock (&self->drain_lock);
1776 g_free (self->codec_data);
1777 self->codec_data_size = 0;
1778 if (self->input_state)
1779 gst_video_codec_state_unref (self->input_state);
1780 self->input_state = NULL;
1781 GST_DEBUG_OBJECT (self, "Stopped decoder");
1782 return TRUE;
1783 }
1784
1785 static gboolean
1786 gst_amc_video_dec_set_format (GstVideoDecoder * decoder,
1787 GstVideoCodecState * state)
1788 {
1789 GstAmcVideoDec *self;
1790 GstAmcVideoDecClass *klass;
1791 GstAmcFormat *format;
1792 const gchar *mime;
1793 gboolean is_format_change = FALSE;
1794 gboolean needs_disable = FALSE;
1795 gchar *format_string;
1796 guint8 *codec_data = NULL;
1797 gsize codec_data_size = 0;
1798 GError *err = NULL;
1799
1800 self = GST_AMC_VIDEO_DEC (decoder);
1801 klass = GST_AMC_VIDEO_DEC_GET_CLASS (self);
1802
1803 GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, state->caps);
1804
1805 /* Check whether the caps change is a real format change, or whether
1806 * only irrelevant parts of the caps changed (or nothing at all).
1807 */
1808 is_format_change |= self->color_format_info.width != state->info.width;
1809 is_format_change |= self->color_format_info.height != state->info.height;
1810 if (state->codec_data) {
1811 GstMapInfo cminfo;
1812
1813 gst_buffer_map (state->codec_data, &cminfo, GST_MAP_READ);
1814 codec_data = g_memdup2 (cminfo.data, cminfo.size);
1815 codec_data_size = cminfo.size;
1816
1817 is_format_change |= (!self->codec_data
1818 || self->codec_data_size != codec_data_size
1819 || memcmp (self->codec_data, codec_data, codec_data_size) != 0);
1820 gst_buffer_unmap (state->codec_data, &cminfo);
1821 } else if (self->codec_data) {
1822 is_format_change |= TRUE;
1823 }
1824
1825 needs_disable = self->started;
1826
1827 /* If the component is already started and a real format change happens
1828 * we have to restart the component. If no real format change
1829 * happened we can just exit here.
1830 */
1831 if (needs_disable && !is_format_change) {
1832 g_free (codec_data);
1833 codec_data = NULL;
1834 codec_data_size = 0;
1835
1836 /* Framerate or something minor changed */
1837 self->input_state_changed = TRUE;
1838 if (self->input_state)
1839 gst_video_codec_state_unref (self->input_state);
1840 self->input_state = gst_video_codec_state_ref (state);
1841 GST_DEBUG_OBJECT (self,
1842 "Already running and caps did not change the format");
1843 return TRUE;
1844 }
1845
1846 if (needs_disable && is_format_change) {
1847 gst_amc_video_dec_drain (self);
1848 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1849 gst_amc_video_dec_stop (GST_VIDEO_DECODER (self));
1850 GST_VIDEO_DECODER_STREAM_LOCK (self);
1851 gst_amc_video_dec_close (GST_VIDEO_DECODER (self));
1852 if (!gst_amc_video_dec_open (GST_VIDEO_DECODER (self))) {
1853 GST_ERROR_OBJECT (self, "Failed to open codec again");
1854 return FALSE;
1855 }
1856
1857 if (!gst_amc_video_dec_start (GST_VIDEO_DECODER (self))) {
1858 GST_ERROR_OBJECT (self, "Failed to start codec again");
1859 }
1860 }
1861 /* srcpad task is not running at this point */
1862 if (self->input_state)
1863 gst_video_codec_state_unref (self->input_state);
1864 self->input_state = NULL;
1865
1866 g_free (self->codec_data);
1867 self->codec_data = codec_data;
1868 self->codec_data_size = codec_data_size;
1869
1870 mime = caps_to_mime (state->caps);
1871 if (!mime) {
1872 GST_ERROR_OBJECT (self, "Failed to convert caps to mime");
1873 return FALSE;
1874 }
1875
1876 format =
1877 gst_amc_format_new_video (mime, state->info.width, state->info.height,
1878 &err);
1879 if (!format) {
1880 GST_ERROR_OBJECT (self, "Failed to create video format");
1881 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1882 return FALSE;
1883 }
1884
1885 /* FIXME: This buffer needs to be valid until the codec is stopped again */
1886 if (self->codec_data) {
1887 gst_amc_format_set_buffer (format, "csd-0", self->codec_data,
1888 self->codec_data_size, &err);
1889 if (err)
1890 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
1891 }
1892
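/* Probe the downstream caps to see whether we can negotiate
 * video/x-raw(memory:GLMemory),format=RGBA output instead of copying the
 * decoded frames into system memory. */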
1893 {
1894 gboolean downstream_supports_gl = FALSE;
1895 GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
1896 GstPad *src_pad = GST_VIDEO_DECODER_SRC_PAD (decoder);
1897 GstCaps *templ_caps = gst_pad_get_pad_template_caps (src_pad);
1898 GstCaps *downstream_caps = gst_pad_peer_query_caps (src_pad, templ_caps);
1899
1900 gst_caps_unref (templ_caps);
1901
1902 if (downstream_caps) {
1903 guint i, n;
1904 GstStaticCaps static_caps =
1905 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
1906 (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, "RGBA"));
1907 GstCaps *gl_memory_caps = gst_static_caps_get (&static_caps);
1908
1909 GST_DEBUG_OBJECT (self, "Available downstream caps: %" GST_PTR_FORMAT,
1910 downstream_caps);
1911
1912 /* Check whether the downstream caps support
1913 * video/x-raw(memory:GLMemory),format=RGBA */
1914 n = gst_caps_get_size (downstream_caps);
1915 for (i = 0; i < n; i++) {
1916 GstCaps *caps = NULL;
1917 GstStructure *structure = gst_caps_get_structure (downstream_caps, i);
1918 GstCapsFeatures *features = gst_caps_get_features (downstream_caps, i);
1919
1920 caps = gst_caps_new_full (gst_structure_copy (structure), NULL);
1921 if (!caps)
1922 continue;
1923
1924 gst_caps_set_features (caps, 0, gst_caps_features_copy (features));
1925
1926 if (gst_caps_can_intersect (caps, gl_memory_caps)) {
1927 downstream_supports_gl = TRUE;
1928 }
1929
1930 gst_caps_unref (caps);
1931 if (downstream_supports_gl)
1932 break;
1933 }
1934
1935 gst_caps_unref (gl_memory_caps);
1936
1937 /* If video/x-raw(memory:GLMemory),format=RGBA is supported,
1938 * update the video decoder output state accordingly and negotiate */
1939 if (downstream_supports_gl) {
1940 GstVideoCodecState *output_state = NULL;
1941 GstVideoCodecState *prev_output_state = NULL;
1942
1943 prev_output_state = gst_video_decoder_get_output_state (decoder);
1944
1945 output_state =
1946 gst_video_decoder_set_output_state (decoder, GST_VIDEO_FORMAT_RGBA,
1947 state->info.width, state->info.height, state);
1948
1949 if (output_state->caps) {
1950 gst_caps_unref (output_state->caps);
1951 }
1952
1953 output_state->caps = gst_video_info_to_caps (&output_state->info);
1954 gst_caps_set_features (output_state->caps, 0,
1955 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
1956
1957 /* gst_amc_video_dec_decide_allocation will update
1958 * self->downstream_supports_gl */
1959 if (!gst_video_decoder_negotiate (decoder)) {
1960 GST_ERROR_OBJECT (self, "Failed to negotiate");
1961
1962 /* Rollback output state changes */
1963 if (prev_output_state) {
1964 output_state->info = prev_output_state->info;
1965 gst_caps_replace (&output_state->caps, prev_output_state->caps);
1966 } else {
1967 gst_video_info_init (&output_state->info);
1968 gst_caps_replace (&output_state->caps, NULL);
1969 }
1970 }
1971 if (prev_output_state) {
1972 gst_video_codec_state_unref (prev_output_state);
1973 }
1974 }
1975
1976 gst_caps_unref (downstream_caps);
1977 }
1978 }
1979
1980 GST_INFO_OBJECT (self, "GL output: %s",
1981 self->downstream_supports_gl ? "enabled" : "disabled");
1982
1983 if (klass->codec_info->gl_output_only && !self->downstream_supports_gl) {
1984 GST_ERROR_OBJECT (self,
1985 "Codec only supports GL output but downstream does not");
1986 return FALSE;
1987 }
1988
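/* Select the codec configuration: reuse an already available surface,
 * create a new SurfaceTexture for GL output, or fall back to plain
 * buffer output without a surface. */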
1989 if (self->downstream_supports_gl && self->surface) {
1990 self->codec_config = AMC_CODEC_CONFIG_WITH_SURFACE;
1991 } else if (self->downstream_supports_gl && !self->surface) {
1992 gboolean ret = TRUE;
1993
1994 self->surface = gst_amc_codec_new_surface_texture (&err);
1995 if (!self->surface) {
1996 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
1997 return FALSE;
1998 }
1999
2000 if (!gst_amc_surface_texture_set_on_frame_available_callback
2001 (self->surface, gst_amc_video_dec_on_frame_available, self, &err)) {
2002 ret = FALSE;
2003 goto done;
2004 }
2005
2006 self->codec_config = AMC_CODEC_CONFIG_WITH_SURFACE;
2007
2008 done:
2009 if (!ret) {
2010 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2011 return FALSE;
2012 }
2013 } else {
2014 self->codec_config = AMC_CODEC_CONFIG_WITHOUT_SURFACE;
2015 }
2016
2017 format_string = gst_amc_format_to_string (format, &err);
2018 if (err)
2019 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2020 GST_DEBUG_OBJECT (self, "Configuring codec with format: %s",
2021 GST_STR_NULL (format_string));
2022 g_free (format_string);
2023
2024 if (!gst_amc_codec_configure (self->codec, format, self->surface, &err)) {
2025 GST_ERROR_OBJECT (self, "Failed to configure codec");
2026 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2027 return FALSE;
2028 }
2029
2030 gst_amc_format_free (format);
2031
2032 if (!gst_amc_codec_start (self->codec, &err)) {
2033 GST_ERROR_OBJECT (self, "Failed to start codec");
2034 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2035 return FALSE;
2036 }
2037
2038 self->started = TRUE;
2039 self->input_state = gst_video_codec_state_ref (state);
2040 self->input_state_changed = TRUE;
2041
2042 /* Start the srcpad loop again */
2043 self->flushing = FALSE;
2044 self->downstream_flow_ret = GST_FLOW_OK;
2045 gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
2046 (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL);
2047
2048 return TRUE;
2049 }
2050
2051 static gboolean
2052 gst_amc_video_dec_flush (GstVideoDecoder * decoder)
2053 {
2054 GstAmcVideoDec *self;
2055 GError *err = NULL;
2056
2057 self = GST_AMC_VIDEO_DEC (decoder);
2058
2059 GST_DEBUG_OBJECT (self, "Flushing decoder");
2060
2061 if (!self->started) {
2062 GST_DEBUG_OBJECT (self, "Codec not started yet");
2063 return TRUE;
2064 }
2065
2066 self->flushing = TRUE;
2067 /* Wait until the srcpad loop has finished. Release
2068 * GST_VIDEO_DECODER_STREAM_LOCK while doing so to prevent deadlocks,
2069 * since the loop function takes that lock itself */
2070 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2071 GST_PAD_STREAM_LOCK (GST_VIDEO_DECODER_SRC_PAD (self));
2072 GST_PAD_STREAM_UNLOCK (GST_VIDEO_DECODER_SRC_PAD (self));
2073 GST_VIDEO_DECODER_STREAM_LOCK (self);
2074 gst_amc_codec_flush (self->codec, &err);
2075 if (err)
2076 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2077 self->flushing = FALSE;
2078
2079 /* Start the srcpad loop again */
2080 self->last_upstream_ts = 0;
2081 self->drained = TRUE;
2082 self->downstream_flow_ret = GST_FLOW_OK;
2083 gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
2084 (GstTaskFunction) gst_amc_video_dec_loop, decoder, NULL);
2085
2086 GST_DEBUG_OBJECT (self, "Flushed decoder");
2087
2088 return TRUE;
2089 }
2090
2091 static GstFlowReturn
2092 gst_amc_video_dec_handle_frame (GstVideoDecoder * decoder,
2093 GstVideoCodecFrame * frame)
2094 {
2095 GstAmcVideoDec *self;
2096 gint idx;
2097 GstAmcBuffer *buf;
2098 GstAmcBufferInfo buffer_info;
2099 guint offset = 0;
2100 GstClockTime timestamp, duration, timestamp_offset = 0;
2101 GstMapInfo minfo;
2102 GError *err = NULL;
2103
2104 memset (&minfo, 0, sizeof (minfo));
2105
2106 self = GST_AMC_VIDEO_DEC (decoder);
2107
2108 GST_DEBUG_OBJECT (self, "Handling frame");
2109
2110 if (!self->started) {
2111 GST_ERROR_OBJECT (self, "Codec not started yet");
2112 gst_video_codec_frame_unref (frame);
2113 return GST_FLOW_NOT_NEGOTIATED;
2114 }
2115
2116 if (self->flushing)
2117 goto flushing;
2118
2119 if (self->downstream_flow_ret != GST_FLOW_OK)
2120 goto downstream_error;
2121
2122 timestamp = frame->pts;
2123 duration = frame->duration;
2124
2125 gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ);
2126
2127 while (offset < minfo.size) {
2128 /* Make sure to release the base class stream lock, otherwise
2129 * _loop() can't call _finish_frame() and we might block forever
2130 * because no input buffers are released */
2131 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2132 /* Wait at most 100ms here; some codecs don't fail the dequeue while
2133 * flushing, which would otherwise deadlock during shutdown */
2134 idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
2135 GST_VIDEO_DECODER_STREAM_LOCK (self);
2136
2137 if (idx < 0) {
2138 if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
2139 g_clear_error (&err);
2140 goto flushing;
2141 }
2142
2143 switch (idx) {
2144 case INFO_TRY_AGAIN_LATER:
2145 GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
2146 continue; /* next try */
2147 break;
2148 case G_MININT:
2149 GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
2150 goto dequeue_error;
2151 default:
2152 g_assert_not_reached ();
2153 break;
2154 }
2155
2156 continue;
2157 }
2158
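/* If we started flushing in the meantime, give the just dequeued input
 * buffer back to the codec as an empty buffer and bail out. */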
2159 if (self->flushing) {
2160 memset (&buffer_info, 0, sizeof (buffer_info));
2161 gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
2162 goto flushing;
2163 }
2164
2165 if (self->downstream_flow_ret != GST_FLOW_OK) {
2166 memset (&buffer_info, 0, sizeof (buffer_info));
2167 gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
2168 if (err && !self->flushing)
2169 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2170 g_clear_error (&err);
2171 goto downstream_error;
2172 }
2173
2174 /* Now handle the frame */
2175
2176 /* Copy the buffer content in chunks that are at most as large as the
2177 * codec input buffer */
2178 buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
2179 if (err)
2180 goto failed_to_get_input_buffer;
2181 else if (!buf)
2182 goto got_null_input_buffer;
2183
2184 memset (&buffer_info, 0, sizeof (buffer_info));
2185 buffer_info.offset = 0;
2186 buffer_info.size = MIN (minfo.size - offset, buf->size);
2187 gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
2188 buffer_info.size);
2189
2190 orc_memcpy (buf->data, minfo.data + offset, buffer_info.size);
2191
2192 gst_amc_buffer_free (buf);
2193 buf = NULL;
2194
2195 /* Interpolate timestamps if we're passing the buffer
2196 * in multiple chunks */
2197 if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
2198 timestamp_offset = gst_util_uint64_scale (offset, duration, minfo.size);
2199 }
2200
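/* MediaCodec expects presentation timestamps in microseconds */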
2201 if (timestamp != GST_CLOCK_TIME_NONE) {
2202 buffer_info.presentation_time_us =
2203 gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
2204 self->last_upstream_ts = timestamp + timestamp_offset;
2205 }
2206 if (duration != GST_CLOCK_TIME_NONE)
2207 self->last_upstream_ts += duration;
2208
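/* Only the first chunk of a frame gets the sync-frame flag and the
 * BufferIdentification user data (carrying the interpolated timestamp). */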
2209 if (offset == 0) {
2210 BufferIdentification *id =
2211 buffer_identification_new (timestamp + timestamp_offset);
2212 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
2213 buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
2214 gst_video_codec_frame_set_user_data (frame, id,
2215 (GDestroyNotify) buffer_identification_free);
2216 }
2217
2218 offset += buffer_info.size;
2219 GST_DEBUG_OBJECT (self,
2220 "Queueing buffer %d: size %d time %" G_GINT64_FORMAT
2221 " flags 0x%08x", idx, buffer_info.size,
2222 buffer_info.presentation_time_us, buffer_info.flags);
2223 if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
2224 &err)) {
2225 if (self->flushing) {
2226 g_clear_error (&err);
2227 goto flushing;
2228 }
2229 goto queue_error;
2230 }
2231 self->drained = FALSE;
2232 }
2233
2234 gst_buffer_unmap (frame->input_buffer, &minfo);
2235 gst_video_codec_frame_unref (frame);
2236
2237 return self->downstream_flow_ret;
2238
2239 downstream_error:
2240 {
2241 GST_ERROR_OBJECT (self, "Downstream returned %s",
2242 gst_flow_get_name (self->downstream_flow_ret));
2243 if (minfo.data)
2244 gst_buffer_unmap (frame->input_buffer, &minfo);
2245 gst_video_codec_frame_unref (frame);
2246 return self->downstream_flow_ret;
2247 }
2248 failed_to_get_input_buffer:
2249 {
2250 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2251 if (minfo.data)
2252 gst_buffer_unmap (frame->input_buffer, &minfo);
2253 gst_video_codec_frame_unref (frame);
2254 return GST_FLOW_ERROR;
2255 }
2256 got_null_input_buffer:
2257 {
2258 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
2259 ("Got no input buffer"));
2260 if (minfo.data)
2261 gst_buffer_unmap (frame->input_buffer, &minfo);
2262 gst_video_codec_frame_unref (frame);
2263 return GST_FLOW_ERROR;
2264 }
2265 dequeue_error:
2266 {
2267 GST_ELEMENT_ERROR_FROM_ERROR (self, err);
2268 if (minfo.data)
2269 gst_buffer_unmap (frame->input_buffer, &minfo);
2270 gst_video_codec_frame_unref (frame);
2271 return GST_FLOW_ERROR;
2272 }
2273 queue_error:
2274 {
2275 GST_VIDEO_DECODER_ERROR_FROM_ERROR (self, err);
2276 if (minfo.data)
2277 gst_buffer_unmap (frame->input_buffer, &minfo);
2278 gst_video_codec_frame_unref (frame);
2279 return GST_FLOW_ERROR;
2280 }
2281 flushing:
2282 {
2283 GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
2284 if (minfo.data)
2285 gst_buffer_unmap (frame->input_buffer, &minfo);
2286 gst_video_codec_frame_unref (frame);
2287 return GST_FLOW_FLUSHING;
2288 }
2289 }
2290
2291 static GstFlowReturn
2292 gst_amc_video_dec_finish (GstVideoDecoder * decoder)
2293 {
2294 GstAmcVideoDec *self;
2295
2296 self = GST_AMC_VIDEO_DEC (decoder);
2297
2298 return gst_amc_video_dec_drain (self);
2299 }
2300
2301 static GstFlowReturn
2302 gst_amc_video_dec_drain (GstAmcVideoDec * self)
2303 {
2304 GstFlowReturn ret;
2305 gint idx;
2306 GError *err = NULL;
2307
2308 GST_DEBUG_OBJECT (self, "Draining codec");
2309 if (!self->started) {
2310 GST_DEBUG_OBJECT (self, "Codec not started yet");
2311 return GST_FLOW_OK;
2312 }
2313
2314 /* Don't send drain buffer twice, this doesn't work */
2315 if (self->drained) {
2316 GST_DEBUG_OBJECT (self, "Codec is drained already");
2317 return GST_FLOW_OK;
2318 }
2319
2320 /* Make sure to release the base class stream lock, otherwise
2321 * _loop() can't call _finish_frame() and we might block forever
2322 * because no input buffers are released */
2323 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2324 /* Send an EOS buffer to the codec and let the base
2325 * class drop the EOS event. We will send it later, when
2326 * the EOS buffer arrives on the output side.
2327 * Wait at most 0.5s here. */
2328 idx = gst_amc_codec_dequeue_input_buffer (self->codec, 500000, &err);
2329 GST_VIDEO_DECODER_STREAM_LOCK (self);
2330
2331 if (idx >= 0) {
2332 GstAmcBuffer *buf;
2333 GstAmcBufferInfo buffer_info;
2334
2335 buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
2336 if (buf) {
2337 GST_VIDEO_DECODER_STREAM_UNLOCK (self);
2338 g_mutex_lock (&self->drain_lock);
2339 self->draining = TRUE;
2340
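/* Queue a zero-sized buffer flagged END_OF_STREAM; the output loop
 * broadcasts drain_cond once the corresponding EOS shows up on the
 * output side. */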
2341 memset (&buffer_info, 0, sizeof (buffer_info));
2342 buffer_info.size = 0;
2343 buffer_info.presentation_time_us =
2344 gst_util_uint64_scale (self->last_upstream_ts, 1, GST_USECOND);
2345 buffer_info.flags |= BUFFER_FLAG_END_OF_STREAM;
2346
2347 gst_amc_buffer_set_position_and_limit (buf, NULL, 0, 0);
2348 gst_amc_buffer_free (buf);
2349 buf = NULL;
2350
2351 if (gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info,
2352 &err)) {
2353 GST_DEBUG_OBJECT (self, "Waiting until codec is drained");
2354 g_cond_wait (&self->drain_cond, &self->drain_lock);
2355 GST_DEBUG_OBJECT (self, "Drained codec");
2356 ret = GST_FLOW_OK;
2357 } else {
2358 GST_ERROR_OBJECT (self, "Failed to queue input buffer");
2359 if (self->flushing) {
2360 g_clear_error (&err);
2361 ret = GST_FLOW_FLUSHING;
2362 } else {
2363 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2364 ret = GST_FLOW_ERROR;
2365 }
2366 }
2367
2368 self->drained = TRUE;
2369 self->draining = FALSE;
2370 g_mutex_unlock (&self->drain_lock);
2371 GST_VIDEO_DECODER_STREAM_LOCK (self);
2372 } else {
2373 GST_ERROR_OBJECT (self, "Failed to get buffer for EOS: %d", idx);
2374 if (err)
2375 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2376 ret = GST_FLOW_ERROR;
2377 }
2378 } else {
2379 GST_ERROR_OBJECT (self, "Failed to acquire buffer for EOS: %d", idx);
2380 if (err)
2381 GST_ELEMENT_WARNING_FROM_ERROR (self, err);
2382 ret = GST_FLOW_ERROR;
2383 }
2384
2385 return ret;
2386 }
2387
2388 static gboolean
2389 gst_amc_video_dec_src_query (GstVideoDecoder * bdec, GstQuery * query)
2390 {
2391 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (bdec);
2392
2393 switch (GST_QUERY_TYPE (query)) {
2394 case GST_QUERY_CONTEXT:
2395 {
2396 if (gst_gl_handle_context_query ((GstElement *) self, query,
2397 self->gl_display, self->gl_context, self->other_gl_context))
2398 return TRUE;
2399 break;
2400 }
2401 default:
2402 break;
2403 }
2404
2405 return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (bdec, query);
2406 }
2407
2408 static gboolean
2409 _caps_are_rgba_with_gl_memory (GstCaps * caps)
2410 {
2411 GstVideoInfo info;
2412 GstCapsFeatures *features;
2413
2414 if (!caps)
2415 return FALSE;
2416
2417 if (!gst_video_info_from_caps (&info, caps))
2418 return FALSE;
2419
2420 if (info.finfo->format != GST_VIDEO_FORMAT_RGBA)
2421 return FALSE;
2422
2423 if (!(features = gst_caps_get_features (caps, 0)))
2424 return FALSE;
2425
2426 return gst_caps_features_contains (features,
2427 GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
2428 }
2429
2430 static gboolean
2431 _find_local_gl_context (GstAmcVideoDec * self)
2432 {
2433 if (gst_gl_query_local_gl_context (GST_ELEMENT (self), GST_PAD_SRC,
2434 &self->gl_context))
2435 return TRUE;
2436 return FALSE;
2437 }
2438
2439 static gboolean
2440 gst_amc_video_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
2441 {
2442 GstAmcVideoDec *self = GST_AMC_VIDEO_DEC (bdec);
2443 gboolean need_pool = FALSE;
2444 GstCaps *caps = NULL;
2445 // GError *error = NULL;
2446
2447 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
2448 return FALSE;
2449
2450 self->downstream_supports_gl = FALSE;
2451 gst_query_parse_allocation (query, &caps, &need_pool);
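/* GL output is only enabled when the negotiated allocation caps are
 * RGBA GLMemory and a usable local GL context can be found. */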
2452 if (_caps_are_rgba_with_gl_memory (caps)) {
2453
2454 if (!gst_gl_ensure_element_data (self, &self->gl_display,
2455 &self->other_gl_context))
2456 return FALSE;
2457
2458 if (!_find_local_gl_context (self))
2459 goto out;
2460 #if 0
2461 if (!self->gl_context) {
2462 GST_OBJECT_LOCK (self->gl_display);
2463 do {
2464 if (self->gl_context) {
2465 gst_object_unref (self->gl_context);
2466 self->gl_context = NULL;
2467 }
2468 /* just get a GL context. we don't care */
2469 self->gl_context =
2470 gst_gl_display_get_gl_context_for_thread (self->gl_display, NULL);
2471 if (!self->gl_context) {
2472 if (!gst_gl_display_create_context (self->gl_display,
2473 self->other_gl_context, &self->gl_context, &error)) {
2474 GST_OBJECT_UNLOCK (self->gl_display);
2475 goto context_error;
2476 }
2477 }
2478 } while (!gst_gl_display_add_context (self->gl_display,
2479 self->gl_context));
2480 GST_OBJECT_UNLOCK (self->gl_display);
2481 }
2482 #endif
2483
2484 self->downstream_supports_gl = TRUE;
2485 }
2486
2487 out:
2488 return gst_amc_video_dec_check_codec_config (self);
2489 #if 0
2490 context_error:
2491 {
2492 GST_ELEMENT_ERROR (self, RESOURCE, NOT_FOUND, ("%s", error->message),
2493 (NULL));
2494 g_clear_error (&error);
2495 return FALSE;
2496 }
2497 #endif
2498 }
2499
2500 static void
2501 gst_amc_video_dec_on_frame_available (GstAmcSurfaceTexture * texture,
2502 gpointer user_data)
2503 {
2504 GstAmcVideoDec *self = (GstAmcVideoDec *) user_data;
2505
2506 /* apparently we can be called after the decoder has been closed */
2507 if (!self)
2508 return;
2509
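/* Note the newly available frame and wake up anything waiting on gl_cond */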
2510 g_mutex_lock (&self->gl_lock);
2511 self->gl_ready_frame_count++;
2512 GST_LOG_OBJECT (self, "frame %u available", self->gl_ready_frame_count);
2513 g_cond_broadcast (&self->gl_cond);
2514 g_mutex_unlock (&self->gl_lock);
2515 }
2516