• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* GStreamer
2  * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3  *
4  * This library is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Library General Public
6  * License as published by the Free Software Foundation; either
7  * version 2 of the License, or (at your option) any later version.
8  *
9  * This library is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12  * Library General Public License for more details.
13  *
14  * You should have received a copy of the GNU Library General Public
15  * License along with this library; if not, write to the
16  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17  * Boston, MA 02110-1301, USA.
18  */
19 
20 #ifdef HAVE_CONFIG_H
21 #include "config.h"
22 #endif
23 
24 #include <assert.h>
25 #include <string.h>
26 
27 #include <libavcodec/avcodec.h>
28 #include <libavutil/stereo3d.h>
29 
30 #include "gstav.h"
31 #include "gstavcodecmap.h"
32 #include "gstavutils.h"
33 #include "gstavviddec.h"
34 
/* Performance debug category shared with the rest of GStreamer ("GST_PERFORMANCE") */
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);

/* Mask for the timestamp tracking index (256 entries) */
#define MAX_TS_MASK 0xff

/* Default values for the element properties installed in class_init */
#define DEFAULT_LOWRES			0
#define DEFAULT_SKIPFRAME		0
#define DEFAULT_DIRECT_RENDERING	TRUE
#define DEFAULT_DEBUG_MV		FALSE
#define DEFAULT_MAX_THREADS		0
#define DEFAULT_OUTPUT_CORRUPT		TRUE
/* Buffer pool and allocation tuning; DEFAULT_STRIDE_ALIGN is an
 * alignment mask (align-to-32 means a mask of 31) */
#define REQUIRED_POOL_MAX_BUFFERS       32
#define DEFAULT_STRIDE_ALIGN            31
#define DEFAULT_ALLOC_PARAM             { 0, DEFAULT_STRIDE_ALIGN, 0, 0, }

/* GObject property IDs */
enum
{
  PROP_0,
  PROP_LOWRES,
  PROP_SKIPFRAME,
  PROP_DIRECT_RENDERING,
  PROP_DEBUG_MV,
  PROP_MAX_THREADS,
  PROP_OUTPUT_CORRUPT,
  PROP_LAST
};
60 
/* A number of function prototypes are given so we can refer to them later. */
static void gst_ffmpegviddec_base_init (GstFFMpegVidDecClass * klass);
static void gst_ffmpegviddec_class_init (GstFFMpegVidDecClass * klass);
static void gst_ffmpegviddec_init (GstFFMpegVidDec * ffmpegdec);
static void gst_ffmpegviddec_finalize (GObject * object);

/* GstVideoDecoder virtual methods */
static gboolean gst_ffmpegviddec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state);
static GstFlowReturn gst_ffmpegviddec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame);
static gboolean gst_ffmpegviddec_start (GstVideoDecoder * decoder);
static gboolean gst_ffmpegviddec_stop (GstVideoDecoder * decoder);
static gboolean gst_ffmpegviddec_flush (GstVideoDecoder * decoder);
static gboolean gst_ffmpegviddec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query);
static gboolean gst_ffmpegviddec_propose_allocation (GstVideoDecoder * decoder,
    GstQuery * query);

/* GObject property accessors */
static void gst_ffmpegviddec_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_ffmpegviddec_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);

static gboolean gst_ffmpegviddec_negotiate (GstFFMpegVidDec * ffmpegdec,
    AVCodecContext * context, AVFrame * picture);

/* some sort of bufferpool handling, but different */
static int gst_ffmpegviddec_get_buffer2 (AVCodecContext * context,
    AVFrame * picture, int flags);

static GstFlowReturn gst_ffmpegviddec_finish (GstVideoDecoder * decoder);
static GstFlowReturn gst_ffmpegviddec_drain (GstVideoDecoder * decoder);

static gboolean picture_changed (GstFFMpegVidDec * ffmpegdec,
    AVFrame * picture);
static gboolean context_changed (GstFFMpegVidDec * ffmpegdec,
    AVCodecContext * context);

/* Quark under which the AVCodec pointer is stashed on each registered
 * decoder subtype (set by the plugin registration code, read in base_init) */
#define GST_FFDEC_PARAMS_QDATA g_quark_from_static_string("avdec-params")

static GstElementClass *parent_class = NULL;
103 #define GST_FFMPEGVIDDEC_TYPE_LOWRES (gst_ffmpegviddec_lowres_get_type())
104 static GType
gst_ffmpegviddec_lowres_get_type(void)105 gst_ffmpegviddec_lowres_get_type (void)
106 {
107   static GType ffmpegdec_lowres_type = 0;
108 
109   if (!ffmpegdec_lowres_type) {
110     static const GEnumValue ffmpegdec_lowres[] = {
111       {0, "0", "full"},
112       {1, "1", "1/2-size"},
113       {2, "2", "1/4-size"},
114       {0, NULL, NULL},
115     };
116 
117     ffmpegdec_lowres_type =
118         g_enum_register_static ("GstLibAVVidDecLowres", ffmpegdec_lowres);
119   }
120 
121   return ffmpegdec_lowres_type;
122 }
123 
124 #define GST_FFMPEGVIDDEC_TYPE_SKIPFRAME (gst_ffmpegviddec_skipframe_get_type())
125 static GType
gst_ffmpegviddec_skipframe_get_type(void)126 gst_ffmpegviddec_skipframe_get_type (void)
127 {
128   static GType ffmpegdec_skipframe_type = 0;
129 
130   if (!ffmpegdec_skipframe_type) {
131     static const GEnumValue ffmpegdec_skipframe[] = {
132       {0, "0", "Skip nothing"},
133       {1, "1", "Skip B-frames"},
134       {2, "2", "Skip IDCT/Dequantization"},
135       {5, "5", "Skip everything"},
136       {0, NULL, NULL},
137     };
138 
139     ffmpegdec_skipframe_type =
140         g_enum_register_static ("GstLibAVVidDecSkipFrame", ffmpegdec_skipframe);
141   }
142 
143   return ffmpegdec_skipframe_type;
144 }
145 
/* Per-subtype base init: fills in element metadata and pad templates for the
 * one libav decoder this registered subtype wraps. The AVCodec pointer was
 * attached to the GType as qdata by the plugin registration code. */
static void
gst_ffmpegviddec_base_init (GstFFMpegVidDecClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstPadTemplate *sinktempl, *srctempl;
  GstCaps *sinkcaps, *srccaps;
  AVCodec *in_plugin;
  gchar *longname, *description;

  /* retrieve the AVCodec stashed on this subtype at registration time */
  in_plugin =
      (AVCodec *) g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass),
      GST_FFDEC_PARAMS_QDATA);
  g_assert (in_plugin != NULL);

  /* construct the element details struct */
  longname = g_strdup_printf ("libav %s decoder", in_plugin->long_name);
  description = g_strdup_printf ("libav %s decoder", in_plugin->name);
  gst_element_class_set_metadata (element_class, longname,
      "Codec/Decoder/Video", description,
      "Wim Taymans <wim.taymans@gmail.com>, "
      "Ronald Bultje <rbultje@ronald.bitfreak.net>, "
      "Edward Hervey <bilboed@bilboed.com>");
  g_free (longname);
  g_free (description);

  /* get the caps; fall back to placeholder caps if the mapping fails so the
   * element still registers */
  sinkcaps = gst_ffmpeg_codecid_to_caps (in_plugin->id, NULL, FALSE);
  if (!sinkcaps) {
    GST_DEBUG ("Couldn't get sink caps for decoder '%s'", in_plugin->name);
    sinkcaps = gst_caps_new_empty_simple ("unknown/unknown");
  }
  srccaps = gst_ffmpeg_codectype_to_video_caps (NULL,
      in_plugin->id, FALSE, in_plugin);
  if (!srccaps) {
    GST_DEBUG ("Couldn't get source caps for decoder '%s'", in_plugin->name);
    srccaps = gst_caps_from_string ("video/x-raw");
  }

  /* pad templates (templates take their own ref on the caps) */
  sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
      GST_PAD_ALWAYS, sinkcaps);
  srctempl = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, srccaps);

  gst_element_class_add_pad_template (element_class, srctempl);
  gst_element_class_add_pad_template (element_class, sinktempl);

  gst_caps_unref (sinkcaps);
  gst_caps_unref (srccaps);

  /* remember which libav codec this class wraps */
  klass->in_plugin = in_plugin;
}
197 
/* Class init: installs properties and wires up the GstVideoDecoder vmethods.
 * "max-threads" is only exposed when the wrapped codec supports threading. */
static void
gst_ffmpegviddec_class_init (GstFFMpegVidDecClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstVideoDecoderClass *viddec_class = GST_VIDEO_DECODER_CLASS (klass);
  int caps;                     /* AVCodec capability bitmask, not GstCaps */

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_ffmpegviddec_finalize;

  gobject_class->set_property = gst_ffmpegviddec_set_property;
  gobject_class->get_property = gst_ffmpegviddec_get_property;

  g_object_class_install_property (gobject_class, PROP_SKIPFRAME,
      g_param_spec_enum ("skip-frame", "Skip frames",
          "Which types of frames to skip during decoding",
          GST_FFMPEGVIDDEC_TYPE_SKIPFRAME, 0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_LOWRES,
      g_param_spec_enum ("lowres", "Low resolution",
          "At which resolution to decode images",
          GST_FFMPEGVIDDEC_TYPE_LOWRES, 0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DIRECT_RENDERING,
      g_param_spec_boolean ("direct-rendering", "Direct Rendering",
          "Enable direct rendering", DEFAULT_DIRECT_RENDERING,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEBUG_MV,
      g_param_spec_boolean ("debug-mv", "Debug motion vectors",
          "Whether libav should print motion vectors on top of the image",
          DEFAULT_DEBUG_MV, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_OUTPUT_CORRUPT,
      g_param_spec_boolean ("output-corrupt", "Output corrupt buffers",
          "Whether libav should output frames even if corrupted",
          DEFAULT_OUTPUT_CORRUPT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* only expose the threading knob for codecs that can actually thread */
  caps = klass->in_plugin->capabilities;
  if (caps & (AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_SLICE_THREADS)) {
    g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MAX_THREADS,
        g_param_spec_int ("max-threads", "Maximum decode threads",
            "Maximum number of worker threads to spawn. (0 = auto)",
            0, G_MAXINT, DEFAULT_MAX_THREADS,
            G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  }

  viddec_class->set_format = gst_ffmpegviddec_set_format;
  viddec_class->handle_frame = gst_ffmpegviddec_handle_frame;
  viddec_class->start = gst_ffmpegviddec_start;
  viddec_class->stop = gst_ffmpegviddec_stop;
  viddec_class->flush = gst_ffmpegviddec_flush;
  viddec_class->finish = gst_ffmpegviddec_finish;
  viddec_class->drain = gst_ffmpegviddec_drain;
  viddec_class->decide_allocation = gst_ffmpegviddec_decide_allocation;
  viddec_class->propose_allocation = gst_ffmpegviddec_propose_allocation;

  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
}
256 
/* Instance init: allocates the AVCodecContext/AVFrame pair and seeds the
 * property fields with their defaults. */
static void
gst_ffmpegviddec_init (GstFFMpegVidDec * ffmpegdec)
{
  GstFFMpegVidDecClass *klass =
      (GstFFMpegVidDecClass *) G_OBJECT_GET_CLASS (ffmpegdec);

  /* some ffmpeg data */
  /* NOTE(review): avcodec_alloc_context3/av_frame_alloc returns are not
   * NULL-checked here; presumably OOM is considered fatal — confirm */
  ffmpegdec->context = avcodec_alloc_context3 (klass->in_plugin);
  /* opaque lets the get_buffer2 callback find us again */
  ffmpegdec->context->opaque = ffmpegdec;
  ffmpegdec->picture = av_frame_alloc ();
  ffmpegdec->opened = FALSE;
  ffmpegdec->skip_frame = ffmpegdec->lowres = 0;
  ffmpegdec->direct_rendering = DEFAULT_DIRECT_RENDERING;
  ffmpegdec->debug_mv = DEFAULT_DEBUG_MV;
  ffmpegdec->max_threads = DEFAULT_MAX_THREADS;
  ffmpegdec->output_corrupt = DEFAULT_OUTPUT_CORRUPT;

  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (ffmpegdec));
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (ffmpegdec), TRUE);

  /* refuse input until caps have been negotiated */
  gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (ffmpegdec), TRUE);
}
280 
281 static void
gst_ffmpegviddec_finalize(GObject * object)282 gst_ffmpegviddec_finalize (GObject * object)
283 {
284   GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) object;
285 
286   av_frame_free (&ffmpegdec->picture);
287 
288   if (ffmpegdec->context != NULL) {
289     gst_ffmpeg_avcodec_close (ffmpegdec->context);
290     av_free (ffmpegdec->context);
291     ffmpegdec->context = NULL;
292   }
293 
294   G_OBJECT_CLASS (parent_class)->finalize (object);
295 }
296 
297 static void
gst_ffmpegviddec_context_set_flags(AVCodecContext * context,guint flags,gboolean enable)298 gst_ffmpegviddec_context_set_flags (AVCodecContext * context, guint flags,
299     gboolean enable)
300 {
301   g_return_if_fail (context != NULL);
302 
303   if (enable)
304     context->flags |= flags;
305   else
306     context->flags &= ~flags;
307 }
308 
/* Close the libav codec and drop per-stream state. Must be called with the
 * object LOCK held. When @reset is TRUE the context is re-initialized to its
 * defaults so it can be reopened later.
 * Returns FALSE only if resetting the context defaults fails. */
static gboolean
gst_ffmpegviddec_close (GstFFMpegVidDec * ffmpegdec, gboolean reset)
{
  GstFFMpegVidDecClass *oclass;
  guint i;

  oclass = (GstFFMpegVidDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));

  GST_LOG_OBJECT (ffmpegdec, "closing ffmpeg codec");

  gst_caps_replace (&ffmpegdec->last_caps, NULL);

  gst_ffmpeg_avcodec_close (ffmpegdec->context);
  ffmpegdec->opened = FALSE;

  /* -1 marks each plane stride as "not yet seen" */
  for (i = 0; i < G_N_ELEMENTS (ffmpegdec->stride); i++)
    ffmpegdec->stride[i] = -1;

  gst_buffer_replace (&ffmpegdec->palette, NULL);

  /* extradata was allocated by us (gst_ffmpeg_caps_with_codecid), free it */
  if (ffmpegdec->context->extradata) {
    av_free (ffmpegdec->context->extradata);
    ffmpegdec->context->extradata = NULL;
  }
  if (reset) {
    if (avcodec_get_context_defaults3 (ffmpegdec->context,
            oclass->in_plugin) < 0) {
      GST_DEBUG_OBJECT (ffmpegdec, "Failed to set context defaults");
      return FALSE;
    }
    /* the reset cleared opaque; restore it for get_buffer2 */
    ffmpegdec->context->opaque = ffmpegdec;
  }
  return TRUE;
}
344 
/* Open the libav codec with the current context settings. Must be called with
 * the object LOCK held. On failure the context is closed and reset.
 * Returns TRUE if the codec was opened. */
static gboolean
gst_ffmpegviddec_open (GstFFMpegVidDec * ffmpegdec)
{
  GstFFMpegVidDecClass *oclass;
  guint i;

  oclass = (GstFFMpegVidDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));

  if (gst_ffmpeg_avcodec_open (ffmpegdec->context, oclass->in_plugin) < 0)
    goto could_not_open;

  /* strides are (re)learned from the first decoded frames */
  for (i = 0; i < G_N_ELEMENTS (ffmpegdec->stride); i++)
    ffmpegdec->stride[i] = -1;

  ffmpegdec->opened = TRUE;

  GST_LOG_OBJECT (ffmpegdec, "Opened libav codec %s, id %d",
      oclass->in_plugin->name, oclass->in_plugin->id);

  /* honour the "output-corrupt" property on the freshly opened context */
  gst_ffmpegviddec_context_set_flags (ffmpegdec->context,
      AV_CODEC_FLAG_OUTPUT_CORRUPT, ffmpegdec->output_corrupt);

  return TRUE;

  /* ERRORS */
could_not_open:
  {
    gst_ffmpegviddec_close (ffmpegdec, TRUE);
    GST_DEBUG_OBJECT (ffmpegdec, "avdec_%s: Failed to open libav codec",
        oclass->in_plugin->name);
    return FALSE;
  }
}
379 
380 static void
gst_ffmpegviddec_get_palette(GstFFMpegVidDec * ffmpegdec,GstVideoCodecState * state)381 gst_ffmpegviddec_get_palette (GstFFMpegVidDec * ffmpegdec,
382     GstVideoCodecState * state)
383 {
384   GstStructure *str = gst_caps_get_structure (state->caps, 0);
385   const GValue *palette_v;
386   GstBuffer *palette;
387 
388   /* do we have a palette? */
389   if ((palette_v = gst_structure_get_value (str, "palette_data"))) {
390     palette = gst_value_get_buffer (palette_v);
391     GST_DEBUG ("got palette data %p", palette);
392     if (gst_buffer_get_size (palette) >= AVPALETTE_SIZE) {
393       gst_buffer_replace (&ffmpegdec->palette, palette);
394     }
395   }
396 }
397 
398 
/* GstVideoDecoder::set_format — (re)configure the libav context from the new
 * input caps, closing any previously opened codec first. Takes the object
 * LOCK for the context manipulation and drops it around calls that may
 * recurse into the decoder (finish/close). Returns FALSE if the codec could
 * not be (re)opened. */
static gboolean
gst_ffmpegviddec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstFFMpegVidDec *ffmpegdec;
  GstFFMpegVidDecClass *oclass;
  GstClockTime latency = GST_CLOCK_TIME_NONE;
  gboolean ret = FALSE;

  ffmpegdec = (GstFFMpegVidDec *) decoder;
  oclass = (GstFFMpegVidDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));

  /* nothing to do when the caps did not actually change */
  if (ffmpegdec->last_caps != NULL &&
      gst_caps_is_equal (ffmpegdec->last_caps, state->caps)) {
    return TRUE;
  }

  GST_DEBUG_OBJECT (ffmpegdec, "setcaps called");

  GST_OBJECT_LOCK (ffmpegdec);
  /* stupid check for VC1 */
  /* NOTE(review): this writes through oclass->in_plugin, i.e. mutates data
   * shared by all instances of this class — looks intentional but fragile */
  if ((oclass->in_plugin->id == AV_CODEC_ID_WMV3) ||
      (oclass->in_plugin->id == AV_CODEC_ID_VC1))
    oclass->in_plugin->id = gst_ffmpeg_caps_to_codecid (state->caps, NULL);

  /* close old session */
  if (ffmpegdec->opened) {
    /* drop the lock: finish() pushes out pending frames downstream */
    GST_OBJECT_UNLOCK (ffmpegdec);
    gst_ffmpegviddec_finish (decoder);
    GST_OBJECT_LOCK (ffmpegdec);
    if (!gst_ffmpegviddec_close (ffmpegdec, TRUE)) {
      GST_OBJECT_UNLOCK (ffmpegdec);
      return FALSE;
    }
    /* forget everything we learned about the previous stream */
    ffmpegdec->pic_pix_fmt = 0;
    ffmpegdec->pic_width = 0;
    ffmpegdec->pic_height = 0;
    ffmpegdec->pic_par_n = 0;
    ffmpegdec->pic_par_d = 0;
    ffmpegdec->pic_interlaced = 0;
    ffmpegdec->pic_field_order = 0;
    ffmpegdec->pic_field_order_changed = FALSE;
    ffmpegdec->ctx_ticks = 0;
    ffmpegdec->ctx_time_n = 0;
    ffmpegdec->ctx_time_d = 0;
    ffmpegdec->cur_multiview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
    ffmpegdec->cur_multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
  }

  gst_caps_replace (&ffmpegdec->last_caps, state->caps);

  /* set buffer functions */
  ffmpegdec->context->get_buffer2 = gst_ffmpegviddec_get_buffer2;
  ffmpegdec->context->draw_horiz_band = NULL;

  /* reset coded_width/_height to prevent it being reused from last time when
   * the codec is opened again, causing a mismatch and possible
   * segfault/corruption. (Common scenario when renegotiating caps) */
  ffmpegdec->context->coded_width = 0;
  ffmpegdec->context->coded_height = 0;

  GST_LOG_OBJECT (ffmpegdec, "size %dx%d", ffmpegdec->context->width,
      ffmpegdec->context->height);

  /* FIXME : Create a method that takes GstVideoCodecState instead */
  /* get size and so */
  gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id,
      oclass->in_plugin->type, state->caps, ffmpegdec->context);

  GST_LOG_OBJECT (ffmpegdec, "size after %dx%d", ffmpegdec->context->width,
      ffmpegdec->context->height);

  gst_ffmpegviddec_get_palette (ffmpegdec, state);

  if (!ffmpegdec->context->time_base.den || !ffmpegdec->context->time_base.num) {
    GST_DEBUG_OBJECT (ffmpegdec, "forcing 25/1 framerate");
    ffmpegdec->context->time_base.num = 1;
    ffmpegdec->context->time_base.den = 25;
  }

  /* workaround encoder bugs */
  ffmpegdec->context->workaround_bugs |= FF_BUG_AUTODETECT;
  ffmpegdec->context->err_recognition = 1;

  /* for slow cpus */
  ffmpegdec->context->lowres = ffmpegdec->lowres;
  ffmpegdec->context->skip_frame = ffmpegdec->skip_frame;

  /* ffmpeg can draw motion vectors on top of the image (not every decoder
   * supports it) */
  ffmpegdec->context->debug_mv = ffmpegdec->debug_mv;

  {
    GstQuery *query;
    gboolean is_live;

    if (ffmpegdec->max_threads == 0) {
      if (!(oclass->in_plugin->capabilities & AV_CODEC_CAP_AUTO_THREADS))
        ffmpegdec->context->thread_count = gst_ffmpeg_auto_max_threads ();
      else
        ffmpegdec->context->thread_count = 0;
    } else
      ffmpegdec->context->thread_count = ffmpegdec->max_threads;

    query = gst_query_new_latency ();
    is_live = FALSE;
    /* Check if upstream is live. If it isn't we can enable frame based
     * threading, which is adding latency */
    if (gst_pad_peer_query (GST_VIDEO_DECODER_SINK_PAD (ffmpegdec), query)) {
      gst_query_parse_latency (query, &is_live, NULL, NULL);
    }
    gst_query_unref (query);

    if (is_live)
      ffmpegdec->context->thread_type = FF_THREAD_SLICE;
    else
      ffmpegdec->context->thread_type = FF_THREAD_SLICE | FF_THREAD_FRAME;
  }

  /* open codec - we don't select an output pix_fmt yet,
   * simply because we don't know! We only get it
   * during playback... */
  if (!gst_ffmpegviddec_open (ffmpegdec))
    goto open_failed;

  if (ffmpegdec->input_state)
    gst_video_codec_state_unref (ffmpegdec->input_state);
  ffmpegdec->input_state = gst_video_codec_state_ref (state);

  /* report B-frame reordering delay as latency when a framerate is known */
  if (ffmpegdec->input_state->info.fps_n) {
    GstVideoInfo *info = &ffmpegdec->input_state->info;
    latency = gst_util_uint64_scale_ceil (
        (ffmpegdec->context->has_b_frames) * GST_SECOND, info->fps_d,
        info->fps_n);
  }

  ret = TRUE;

done:
  GST_OBJECT_UNLOCK (ffmpegdec);

  if (GST_CLOCK_TIME_IS_VALID (latency))
    gst_video_decoder_set_latency (decoder, latency, latency);

  return ret;

  /* ERRORS */
open_failed:
  {
    GST_DEBUG_OBJECT (ffmpegdec, "Failed to open");
    goto done;
  }
}
552 
/* Per-frame bookkeeping attached to an AVFrame via its opaque pointer,
 * linking libav's buffer lifetime to the GStreamer codec frame. */
typedef struct
{
  GstFFMpegVidDec *ffmpegdec;   /* owning decoder (back-pointer, no ref taken here) */
  GstVideoCodecFrame *frame;    /* the GStreamer frame being decoded into */
  gboolean mapped;              /* TRUE when vframe below is currently mapped */
  GstVideoFrame vframe;         /* mapping of @buffer used to fill the AVFrame */
  GstBuffer *buffer;            /* buffer acquired from our internal pool (DR path) */
  AVBufferRef *avbuffer;        /* wrapped libav buffer in the fallback (no-DR) path */
} GstFFMpegVidDecVideoFrame;
562 
563 static GstFFMpegVidDecVideoFrame *
gst_ffmpegviddec_video_frame_new(GstFFMpegVidDec * ffmpegdec,GstVideoCodecFrame * frame)564 gst_ffmpegviddec_video_frame_new (GstFFMpegVidDec * ffmpegdec,
565     GstVideoCodecFrame * frame)
566 {
567   GstFFMpegVidDecVideoFrame *dframe;
568 
569   dframe = g_slice_new0 (GstFFMpegVidDecVideoFrame);
570   dframe->ffmpegdec = ffmpegdec;
571   dframe->frame = frame;
572 
573   GST_DEBUG_OBJECT (ffmpegdec, "new video frame %p", dframe);
574 
575   return dframe;
576 }
577 
/* Tear down a frame wrapper: unmap the video frame if mapped, release the
 * codec frame back to the base class, and drop buffer references. */
static void
gst_ffmpegviddec_video_frame_free (GstFFMpegVidDec * ffmpegdec,
    GstFFMpegVidDecVideoFrame * frame)
{
  GST_DEBUG_OBJECT (ffmpegdec, "free video frame %p", frame);

  /* unmap before dropping the underlying buffer reference below */
  if (frame->mapped)
    gst_video_frame_unmap (&frame->vframe);
  gst_video_decoder_release_frame (GST_VIDEO_DECODER (ffmpegdec), frame->frame);
  gst_buffer_replace (&frame->buffer, NULL);
  /* only set in the no-direct-rendering fallback path */
  if (frame->avbuffer) {
    av_buffer_unref (&frame->avbuffer);
  }
  g_slice_free (GstFFMpegVidDecVideoFrame, frame);
}
593 
594 static void
dummy_free_buffer(void * opaque,uint8_t * data)595 dummy_free_buffer (void *opaque, uint8_t * data)
596 {
597   GstFFMpegVidDecVideoFrame *frame = opaque;
598 
599   gst_ffmpegviddec_video_frame_free (frame->ffmpegdec, frame);
600 }
601 
/* This function prepares the pool configuration for direct rendering. To use
 * this method, the codec should support direct rendering and the pool should
 * support video meta and video alignment */
static void
gst_ffmpegvideodec_prepare_dr_pool (GstFFMpegVidDec * ffmpegdec,
    GstBufferPool * pool, GstVideoInfo * info, GstStructure * config)
{
  GstAllocationParams params;
  GstVideoAlignment align;
  GstAllocator *allocator = NULL;
  gint width, height;
  gint linesize_align[4];
  gint i;
  gsize max_align;

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);

  /* let ffmpeg find the alignment and padding */
  avcodec_align_dimensions2 (ffmpegdec->context, &width, &height,
      linesize_align);

  /* padding is the difference between ffmpeg's padded dimensions and the
   * negotiated display dimensions */
  align.padding_top = 0;
  align.padding_left = 0;
  align.padding_right = width - GST_VIDEO_INFO_WIDTH (info);
  align.padding_bottom = height - GST_VIDEO_INFO_HEIGHT (info);

  /* add extra padding to match libav buffer allocation sizes */
  align.padding_bottom++;

  gst_buffer_pool_config_get_allocator (config, &allocator, &params);

  /* combine all alignment requirements into one mask: our default, the
   * allocator's, and each per-plane linesize alignment from ffmpeg
   * (converted from a multiple to a mask via "- 1") */
  max_align = DEFAULT_STRIDE_ALIGN;
  max_align |= params.align;

  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
    if (linesize_align[i] > 0)
      max_align |= linesize_align[i] - 1;
  }

  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++)
    align.stride_align[i] = max_align;

  params.align = max_align;

  gst_buffer_pool_config_set_allocator (config, allocator, &params);

  GST_DEBUG_OBJECT (ffmpegdec, "aligned dimension %dx%d -> %dx%d "
      "padding t:%u l:%u r:%u b:%u, stride_align %d:%d:%d:%d",
      GST_VIDEO_INFO_WIDTH (info),
      GST_VIDEO_INFO_HEIGHT (info), width, height, align.padding_top,
      align.padding_left, align.padding_right, align.padding_bottom,
      align.stride_align[0], align.stride_align[1], align.stride_align[2],
      align.stride_align[3]);

  gst_buffer_pool_config_add_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
  gst_buffer_pool_config_set_video_alignment (config, &align);
}
661 
/* (Re)create the internal buffer pool used for direct rendering whenever the
 * picture's width, height or pixel format differs from the pool's current
 * configuration; otherwise the existing pool is kept. */
static void
gst_ffmpegviddec_ensure_internal_pool (GstFFMpegVidDec * ffmpegdec,
    AVFrame * picture)
{
  GstAllocationParams params = DEFAULT_ALLOC_PARAM;
  GstVideoInfo info;
  GstVideoFormat format;
  GstCaps *caps;
  GstStructure *config;
  guint i;

  /* fast path: pool already matches this picture */
  if (ffmpegdec->internal_pool != NULL &&
      ffmpegdec->pool_width == picture->width &&
      ffmpegdec->pool_height == picture->height &&
      ffmpegdec->pool_format == picture->format)
    return;

  GST_DEBUG_OBJECT (ffmpegdec, "Updating internal pool (%i, %i)",
      picture->width, picture->height);

  format = gst_ffmpeg_pixfmt_to_videoformat (picture->format);
  gst_video_info_set_format (&info, format, picture->width, picture->height);

  /* If we have not yet been negotiated, a NONE format here would
   * result in invalid initial dimension alignments, and potential
   * out of bounds writes.
   */
  ffmpegdec->context->pix_fmt = picture->format;

  /* forget learned strides; the new pool may produce different ones */
  for (i = 0; i < G_N_ELEMENTS (ffmpegdec->stride); i++)
    ffmpegdec->stride[i] = -1;

  if (ffmpegdec->internal_pool)
    gst_object_unref (ffmpegdec->internal_pool);

  ffmpegdec->internal_pool = gst_video_buffer_pool_new ();
  config = gst_buffer_pool_get_config (ffmpegdec->internal_pool);

  caps = gst_video_info_to_caps (&info);
  gst_buffer_pool_config_set_params (config, caps, info.size, 2, 0);
  gst_buffer_pool_config_set_allocator (config, NULL, &params);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  gst_ffmpegvideodec_prepare_dr_pool (ffmpegdec,
      ffmpegdec->internal_pool, &info, config);
  /* generic video pool never fails */
  gst_buffer_pool_set_config (ffmpegdec->internal_pool, config);
  gst_caps_unref (caps);

  gst_buffer_pool_set_active (ffmpegdec->internal_pool, TRUE);

  /* Remember pool size so we can detect changes */
  ffmpegdec->pool_width = picture->width;
  ffmpegdec->pool_height = picture->height;
  ffmpegdec->pool_format = picture->format;
  ffmpegdec->pool_info = info;
}
719 
720 static gboolean
gst_ffmpegviddec_can_direct_render(GstFFMpegVidDec * ffmpegdec)721 gst_ffmpegviddec_can_direct_render (GstFFMpegVidDec * ffmpegdec)
722 {
723   GstFFMpegVidDecClass *oclass;
724 
725   if (!ffmpegdec->direct_rendering)
726     return FALSE;
727 
728   oclass = (GstFFMpegVidDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
729   return ((oclass->in_plugin->capabilities & AV_CODEC_CAP_DR1) ==
730       AV_CODEC_CAP_DR1);
731 }
732 
/* called when ffmpeg wants us to allocate a buffer to write the decoded frame
 * into. We try to give it memory from our pool */
/* AVCodecContext.get_buffer2 implementation. Looks up the GstVideoCodecFrame
 * matching picture->reordered_opaque (the system frame number we stored on
 * the way in), attaches a GstFFMpegVidDecVideoFrame wrapper, and either
 * hands libav planes from our internal pool (direct rendering) or falls back
 * to avcodec_default_get_buffer2. Returns 0 on success, -1 on error. */
static int
gst_ffmpegviddec_get_buffer2 (AVCodecContext * context, AVFrame * picture,
    int flags)
{
  GstVideoCodecFrame *frame;
  GstFFMpegVidDecVideoFrame *dframe;
  GstFFMpegVidDec *ffmpegdec;
  guint c;
  GstFlowReturn ret;
  int create_buffer_flags = 0;

  /* opaque was set to the decoder instance in init()/close() */
  ffmpegdec = (GstFFMpegVidDec *) context->opaque;

  GST_DEBUG_OBJECT (ffmpegdec, "getting buffer picture %p", picture);

  /* apply the last info we have seen to this picture, when we get the
   * picture back from ffmpeg we can use this to correctly timestamp the output
   * buffer */
  GST_DEBUG_OBJECT (ffmpegdec, "opaque value SN %d",
      (gint32) picture->reordered_opaque);

  frame =
      gst_video_decoder_get_frame (GST_VIDEO_DECODER (ffmpegdec),
      picture->reordered_opaque);
  if (G_UNLIKELY (frame == NULL))
    goto no_frame;

  /* now it has a buffer allocated, so it is real and will also
   * be _released */
  GST_VIDEO_CODEC_FRAME_FLAG_UNSET (frame,
      GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY);

  if (G_UNLIKELY (frame->output_buffer != NULL))
    goto duplicate_frame;

  /* GstFFMpegVidDecVideoFrame receives the frame ref */
  if (picture->opaque) {
    dframe = picture->opaque;
    dframe->frame = frame;
  } else {
    picture->opaque = dframe =
        gst_ffmpegviddec_video_frame_new (ffmpegdec, frame);
  }

  GST_DEBUG_OBJECT (ffmpegdec, "storing opaque %p", dframe);

  if (!gst_ffmpegviddec_can_direct_render (ffmpegdec))
    goto no_dr;

  gst_ffmpegviddec_ensure_internal_pool (ffmpegdec, picture);

  ret = gst_buffer_pool_acquire_buffer (ffmpegdec->internal_pool,
      &frame->output_buffer, NULL);
  if (ret != GST_FLOW_OK)
    goto alloc_failed;

  /* piggy-backed alloc'ed on the frame,
   * and there was much rejoicing and we are grateful.
   * Now take away buffer from frame, we will give it back later when decoded.
   * This allows multiple request for a buffer per frame; unusual but possible. */
  gst_buffer_replace (&dframe->buffer, frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);

  /* Fill avpicture */
  if (!gst_video_frame_map (&dframe->vframe, &ffmpegdec->pool_info,
          dframe->buffer, GST_MAP_READWRITE))
    goto map_failed;
  dframe->mapped = TRUE;

  /* point each AVFrame plane at the mapped GStreamer buffer planes */
  for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
    if (c < GST_VIDEO_INFO_N_PLANES (&ffmpegdec->pool_info)) {
      picture->data[c] = GST_VIDEO_FRAME_PLANE_DATA (&dframe->vframe, c);
      picture->linesize[c] = GST_VIDEO_FRAME_PLANE_STRIDE (&dframe->vframe, c);

      /* first time we see a stride for this plane: remember it */
      if (ffmpegdec->stride[c] == -1)
        ffmpegdec->stride[c] = picture->linesize[c];

      /* libav does not allow stride changes, decide allocation should check
       * before replacing the internal pool with a downstream pool.
       * https://bugzilla.gnome.org/show_bug.cgi?id=704769
       * https://bugzilla.libav.org/show_bug.cgi?id=556
       */
      g_assert (picture->linesize[c] == ffmpegdec->stride[c]);
    } else {
      picture->data[c] = NULL;
      picture->linesize[c] = 0;
    }
    GST_LOG_OBJECT (ffmpegdec, "linesize %d, data %p", picture->linesize[c],
        picture->data[c]);
  }

  if ((flags & AV_GET_BUFFER_FLAG_REF) == AV_GET_BUFFER_FLAG_REF) {
    /* decoder might reuse this AVFrame and it would result to no more
     * get_buffer() call if the AVFrame's AVBuffer is writable
     * (meaning that the refcount of AVBuffer == 1).
     * To enforce get_buffer() for the every output frame, set read-only flag here
     */
    create_buffer_flags = AV_BUFFER_FLAG_READONLY;
  }
  /* zero-sized AVBuffer whose only job is to call dummy_free_buffer (and
   * thus free the wrapper) when libav drops its last reference */
  picture->buf[0] = av_buffer_create (NULL,
      0, dummy_free_buffer, dframe, create_buffer_flags);

  GST_LOG_OBJECT (ffmpegdec, "returned frame %p", dframe->buffer);

  return 0;

no_dr:
  {
    int c;
    int ret = avcodec_default_get_buffer2 (context, picture, flags);

    GST_LOG_OBJECT (ffmpegdec, "direct rendering disabled, fallback alloc");

    for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
      ffmpegdec->stride[c] = picture->linesize[c];
    }
    /* Wrap our buffer around the default one to be able to have a callback
     * when our data can be freed. Just putting our data into the first free
     * buffer might not work if there are too many allocated already
     */
    if (picture->buf[0]) {
      dframe->avbuffer = picture->buf[0];
      picture->buf[0] =
          av_buffer_create (picture->buf[0]->data, picture->buf[0]->size,
          dummy_free_buffer, dframe, 0);
    } else {
      picture->buf[0] =
          av_buffer_create (NULL, 0, dummy_free_buffer, dframe, 0);
    }

    return ret;
  }
alloc_failed:
  {
    GST_ELEMENT_ERROR (ffmpegdec, RESOURCE, FAILED,
        ("Unable to allocate memory"),
        ("The downstream pool failed to allocated buffer."));
    return -1;
  }
map_failed:
  {
    GST_ELEMENT_ERROR (ffmpegdec, RESOURCE, OPEN_READ_WRITE,
        ("Cannot access memory for read and write operation."),
        ("The video memory allocated from downstream pool could not mapped for"
            "read and write."));
    return -1;
  }
duplicate_frame:
  {
    GST_WARNING_OBJECT (ffmpegdec, "already alloc'ed output buffer for frame");
    gst_video_codec_frame_unref (frame);
    return -1;
  }
no_frame:
  {
    GST_WARNING_OBJECT (ffmpegdec, "Couldn't get codec frame !");
    return -1;
  }
}
894 
895 static gboolean
picture_changed(GstFFMpegVidDec * ffmpegdec,AVFrame * picture)896 picture_changed (GstFFMpegVidDec * ffmpegdec, AVFrame * picture)
897 {
898   gint pic_field_order = 0;
899 
900   if (picture->interlaced_frame) {
901     if (picture->repeat_pict)
902       pic_field_order |= GST_VIDEO_BUFFER_FLAG_RFF;
903     if (picture->top_field_first)
904       pic_field_order |= GST_VIDEO_BUFFER_FLAG_TFF;
905   }
906 
907   return !(ffmpegdec->pic_width == picture->width
908       && ffmpegdec->pic_height == picture->height
909       && ffmpegdec->pic_pix_fmt == picture->format
910       && ffmpegdec->pic_par_n == picture->sample_aspect_ratio.num
911       && ffmpegdec->pic_par_d == picture->sample_aspect_ratio.den
912       && ffmpegdec->pic_interlaced == picture->interlaced_frame
913       && ffmpegdec->pic_field_order == pic_field_order
914       && ffmpegdec->cur_multiview_mode == ffmpegdec->picture_multiview_mode
915       && ffmpegdec->cur_multiview_flags == ffmpegdec->picture_multiview_flags);
916 }
917 
918 static gboolean
context_changed(GstFFMpegVidDec * ffmpegdec,AVCodecContext * context)919 context_changed (GstFFMpegVidDec * ffmpegdec, AVCodecContext * context)
920 {
921   return !(ffmpegdec->ctx_ticks == context->ticks_per_frame
922       && ffmpegdec->ctx_time_n == context->time_base.num
923       && ffmpegdec->ctx_time_d == context->time_base.den);
924 }
925 
926 static gboolean
update_video_context(GstFFMpegVidDec * ffmpegdec,AVCodecContext * context,AVFrame * picture)927 update_video_context (GstFFMpegVidDec * ffmpegdec, AVCodecContext * context,
928     AVFrame * picture)
929 {
930   gint pic_field_order = 0;
931 
932   if (picture->interlaced_frame) {
933     if (picture->repeat_pict)
934       pic_field_order |= GST_VIDEO_BUFFER_FLAG_RFF;
935     if (picture->top_field_first)
936       pic_field_order |= GST_VIDEO_BUFFER_FLAG_TFF;
937   }
938 
939   if (!picture_changed (ffmpegdec, picture)
940       && !context_changed (ffmpegdec, context))
941     return FALSE;
942 
943   GST_DEBUG_OBJECT (ffmpegdec,
944       "Renegotiating video from %dx%d@ %d:%d PAR %d/%d fps pixfmt %d to %dx%d@ %d:%d PAR %d/%d fps pixfmt %d",
945       ffmpegdec->pic_width, ffmpegdec->pic_height,
946       ffmpegdec->pic_par_n, ffmpegdec->pic_par_d,
947       ffmpegdec->ctx_time_n, ffmpegdec->ctx_time_d,
948       ffmpegdec->pic_pix_fmt,
949       picture->width, picture->height,
950       picture->sample_aspect_ratio.num,
951       picture->sample_aspect_ratio.den,
952       context->time_base.num, context->time_base.den, picture->format);
953 
954   ffmpegdec->pic_pix_fmt = picture->format;
955   ffmpegdec->pic_width = picture->width;
956   ffmpegdec->pic_height = picture->height;
957   ffmpegdec->pic_par_n = picture->sample_aspect_ratio.num;
958   ffmpegdec->pic_par_d = picture->sample_aspect_ratio.den;
959   ffmpegdec->cur_multiview_mode = ffmpegdec->picture_multiview_mode;
960   ffmpegdec->cur_multiview_flags = ffmpegdec->picture_multiview_flags;
961 
962   /* Remember if we have interlaced content and the field order changed
963    * at least once. If that happens, we must be interlace-mode=mixed
964    */
965   if (ffmpegdec->pic_field_order_changed ||
966       (ffmpegdec->pic_field_order != pic_field_order &&
967           ffmpegdec->pic_interlaced))
968     ffmpegdec->pic_field_order_changed = TRUE;
969 
970   ffmpegdec->pic_field_order = pic_field_order;
971   ffmpegdec->pic_interlaced = picture->interlaced_frame;
972 
973   if (!ffmpegdec->pic_interlaced)
974     ffmpegdec->pic_field_order_changed = FALSE;
975 
976   ffmpegdec->ctx_ticks = context->ticks_per_frame;
977   ffmpegdec->ctx_time_n = context->time_base.num;
978   ffmpegdec->ctx_time_d = context->time_base.den;
979 
980   return TRUE;
981 }
982 
983 static void
gst_ffmpegviddec_update_par(GstFFMpegVidDec * ffmpegdec,GstVideoInfo * in_info,GstVideoInfo * out_info)984 gst_ffmpegviddec_update_par (GstFFMpegVidDec * ffmpegdec,
985     GstVideoInfo * in_info, GstVideoInfo * out_info)
986 {
987   gboolean demuxer_par_set = FALSE;
988   gboolean decoder_par_set = FALSE;
989   gint demuxer_num = 1, demuxer_denom = 1;
990   gint decoder_num = 1, decoder_denom = 1;
991 
992   if (in_info->par_n && in_info->par_d) {
993     demuxer_num = in_info->par_n;
994     demuxer_denom = in_info->par_d;
995     demuxer_par_set = TRUE;
996     GST_DEBUG_OBJECT (ffmpegdec, "Demuxer PAR: %d:%d", demuxer_num,
997         demuxer_denom);
998   }
999 
1000   if (ffmpegdec->pic_par_n && ffmpegdec->pic_par_d) {
1001     decoder_num = ffmpegdec->pic_par_n;
1002     decoder_denom = ffmpegdec->pic_par_d;
1003     decoder_par_set = TRUE;
1004     GST_DEBUG_OBJECT (ffmpegdec, "Decoder PAR: %d:%d", decoder_num,
1005         decoder_denom);
1006   }
1007 
1008   if (!demuxer_par_set && !decoder_par_set)
1009     goto no_par;
1010 
1011   if (demuxer_par_set && !decoder_par_set)
1012     goto use_demuxer_par;
1013 
1014   if (decoder_par_set && !demuxer_par_set)
1015     goto use_decoder_par;
1016 
1017   /* Both the demuxer and the decoder provide a PAR. If one of
1018    * the two PARs is 1:1 and the other one is not, use the one
1019    * that is not 1:1. */
1020   if (demuxer_num == demuxer_denom && decoder_num != decoder_denom)
1021     goto use_decoder_par;
1022 
1023   if (decoder_num == decoder_denom && demuxer_num != demuxer_denom)
1024     goto use_demuxer_par;
1025 
1026   /* Both PARs are non-1:1, so use the PAR provided by the demuxer */
1027   goto use_demuxer_par;
1028 
1029 use_decoder_par:
1030   {
1031     GST_DEBUG_OBJECT (ffmpegdec,
1032         "Setting decoder provided pixel-aspect-ratio of %u:%u", decoder_num,
1033         decoder_denom);
1034     out_info->par_n = decoder_num;
1035     out_info->par_d = decoder_denom;
1036     return;
1037   }
1038 use_demuxer_par:
1039   {
1040     GST_DEBUG_OBJECT (ffmpegdec,
1041         "Setting demuxer provided pixel-aspect-ratio of %u:%u", demuxer_num,
1042         demuxer_denom);
1043     out_info->par_n = demuxer_num;
1044     out_info->par_d = demuxer_denom;
1045     return;
1046   }
1047 no_par:
1048   {
1049     GST_DEBUG_OBJECT (ffmpegdec,
1050         "Neither demuxer nor codec provide a pixel-aspect-ratio");
1051     out_info->par_n = 1;
1052     out_info->par_d = 1;
1053     return;
1054   }
1055 }
1056 
1057 static GstVideoMultiviewMode
stereo_av_to_gst(enum AVStereo3DType type)1058 stereo_av_to_gst (enum AVStereo3DType type)
1059 {
1060   switch (type) {
1061     case AV_STEREO3D_SIDEBYSIDE:
1062       return GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
1063     case AV_STEREO3D_TOPBOTTOM:
1064       return GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM;
1065     case AV_STEREO3D_FRAMESEQUENCE:
1066       return GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
1067     case AV_STEREO3D_CHECKERBOARD:
1068       return GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD;
1069     case AV_STEREO3D_SIDEBYSIDE_QUINCUNX:
1070       return GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX;
1071     case AV_STEREO3D_LINES:
1072       return GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
1073     case AV_STEREO3D_COLUMNS:
1074       return GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED;
1075     default:
1076       break;
1077   }
1078 
1079   return GST_VIDEO_MULTIVIEW_MODE_NONE;
1080 }
1081 
/* (Re)configure the output state from the current picture and codec context.
 *
 * Fast-returns TRUE when nothing relevant changed since the last call
 * (see update_video_context). Otherwise creates a new output state,
 * fills in interlace mode, chroma siting, colorimetry, framerate and PAR
 * (preferring values from the upstream caps when present), and negotiates
 * with downstream.
 *
 * Returns FALSE when the pixel format has no GStreamer equivalent or when
 * downstream negotiation fails (in which case the cached parameters are
 * reset so the next frame retries). */
static gboolean
gst_ffmpegviddec_negotiate (GstFFMpegVidDec * ffmpegdec,
    AVCodecContext * context, AVFrame * picture)
{
  GstVideoFormat fmt;
  GstVideoInfo *in_info, *out_info;
  GstVideoCodecState *output_state;
  gint fps_n, fps_d;
  GstClockTime latency;
  GstStructure *in_s;

  /* Fast path: nothing changed, keep the current output state */
  if (!update_video_context (ffmpegdec, context, picture))
    return TRUE;

  fmt = gst_ffmpeg_pixfmt_to_videoformat (ffmpegdec->pic_pix_fmt);
  if (G_UNLIKELY (fmt == GST_VIDEO_FORMAT_UNKNOWN))
    goto unknown_format;

  /* Create the new output state and replace the previous one */
  output_state =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (ffmpegdec), fmt,
      ffmpegdec->pic_width, ffmpegdec->pic_height, ffmpegdec->input_state);
  if (ffmpegdec->output_state)
    gst_video_codec_state_unref (ffmpegdec->output_state);
  ffmpegdec->output_state = output_state;

  in_info = &ffmpegdec->input_state->info;
  out_info = &ffmpegdec->output_state->info;

  /* set the interlaced flag */
  in_s = gst_caps_get_structure (ffmpegdec->input_state->caps, 0);

  /* Only derive interlace-mode from the decoded pictures when the upstream
   * caps did not already specify it */
  if (!gst_structure_has_field (in_s, "interlace-mode")) {
    if (ffmpegdec->pic_interlaced) {
      /* A changing field order, or repeat-first-field content, forces
       * interlace-mode=mixed; otherwise plain interleaved with the
       * observed field order */
      if (ffmpegdec->pic_field_order_changed ||
          (ffmpegdec->pic_field_order & GST_VIDEO_BUFFER_FLAG_RFF)) {
        out_info->interlace_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
      } else {
        out_info->interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
        if ((ffmpegdec->pic_field_order & GST_VIDEO_BUFFER_FLAG_TFF))
          GST_VIDEO_INFO_FIELD_ORDER (out_info) =
              GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
        else
          GST_VIDEO_INFO_FIELD_ORDER (out_info) =
              GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST;
      }
    } else {
      out_info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
    }
  }

  /* Map libav chroma sample location to GStreamer chroma-site, unless
   * upstream caps already carry one */
  if (!gst_structure_has_field (in_s, "chroma-site")) {
    switch (context->chroma_sample_location) {
      case AVCHROMA_LOC_LEFT:
        out_info->chroma_site = GST_VIDEO_CHROMA_SITE_MPEG2;
        break;
      case AVCHROMA_LOC_CENTER:
        out_info->chroma_site = GST_VIDEO_CHROMA_SITE_JPEG;
        break;
      case AVCHROMA_LOC_TOPLEFT:
        out_info->chroma_site = GST_VIDEO_CHROMA_SITE_DV;
        break;
      case AVCHROMA_LOC_TOP:
        out_info->chroma_site = GST_VIDEO_CHROMA_SITE_V_COSITED;
        break;
      default:
        break;
    }
  }

  /* Colour primaries: decoder values fill in only what upstream left
   * unknown */
  if (!gst_structure_has_field (in_s, "colorimetry")
      || in_info->colorimetry.primaries == GST_VIDEO_COLOR_PRIMARIES_UNKNOWN) {
    switch (context->color_primaries) {
      case AVCOL_PRI_BT709:
        out_info->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
        break;
      case AVCOL_PRI_BT470M:
        out_info->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
        break;
      case AVCOL_PRI_BT470BG:
        out_info->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
        break;
      case AVCOL_PRI_SMPTE170M:
        out_info->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
        break;
      case AVCOL_PRI_SMPTE240M:
        out_info->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
        break;
      case AVCOL_PRI_FILM:
        out_info->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_FILM;
        break;
      case AVCOL_PRI_BT2020:
        out_info->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
        break;
      default:
        break;
    }
  }

  /* Transfer function */
  if (!gst_structure_has_field (in_s, "colorimetry")
      || in_info->colorimetry.transfer == GST_VIDEO_TRANSFER_UNKNOWN) {
    switch (context->color_trc) {
      case AVCOL_TRC_BT2020_10:
      case AVCOL_TRC_BT709:
      case AVCOL_TRC_SMPTE170M:
        /* these all use the BT709 transfer characteristics in GStreamer */
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
        break;
      case AVCOL_TRC_GAMMA22:
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA22;
        break;
      case AVCOL_TRC_GAMMA28:
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA28;
        break;
      case AVCOL_TRC_SMPTE240M:
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE240M;
        break;
      case AVCOL_TRC_LINEAR:
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10;
        break;
      case AVCOL_TRC_LOG:
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_LOG100;
        break;
      case AVCOL_TRC_LOG_SQRT:
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_LOG316;
        break;
      case AVCOL_TRC_BT2020_12:
        out_info->colorimetry.transfer = GST_VIDEO_TRANSFER_BT2020_12;
        break;
      default:
        break;
    }
  }

  /* Colour matrix */
  if (!gst_structure_has_field (in_s, "colorimetry")
      || in_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_UNKNOWN) {
    switch (context->colorspace) {
      case AVCOL_SPC_RGB:
        out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
        break;
      case AVCOL_SPC_BT709:
        out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709;
        break;
      case AVCOL_SPC_FCC:
        out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_FCC;
        break;
      case AVCOL_SPC_BT470BG:
      case AVCOL_SPC_SMPTE170M:
        out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
        break;
      case AVCOL_SPC_SMPTE240M:
        out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
        break;
      case AVCOL_SPC_BT2020_NCL:
        out_info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
        break;
      default:
        break;
    }
  }

  /* Colour range: JPEG means full range, anything else is treated as
   * limited (16-235) range */
  if (!gst_structure_has_field (in_s, "colorimetry")
      || in_info->colorimetry.range == GST_VIDEO_COLOR_RANGE_UNKNOWN) {
    if (context->color_range == AVCOL_RANGE_JPEG) {
      out_info->colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
    } else {
      out_info->colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
    }
  }

  /* try to find a good framerate */
  if ((in_info->fps_d && in_info->fps_n) ||
      GST_VIDEO_INFO_FLAG_IS_SET (in_info, GST_VIDEO_FLAG_VARIABLE_FPS)) {
    /* take framerate from input when it was specified (#313970) */
    fps_n = in_info->fps_n;
    fps_d = in_info->fps_d;
  } else {
    /* framerate is the inverse of the context time_base, corrected by
     * ticks_per_frame (cached in ctx_ticks/ctx_time_n/ctx_time_d) */
    fps_n = ffmpegdec->ctx_time_d / ffmpegdec->ctx_ticks;
    fps_d = ffmpegdec->ctx_time_n;

    if (!fps_d) {
      GST_LOG_OBJECT (ffmpegdec, "invalid framerate: %d/0, -> %d/1", fps_n,
          fps_n);
      fps_d = 1;
    }
    if (gst_util_fraction_compare (fps_n, fps_d, 1000, 1) > 0) {
      /* > 1000 fps is considered bogus; signal variable framerate (0/1) */
      GST_LOG_OBJECT (ffmpegdec, "excessive framerate: %d/%d, -> 0/1", fps_n,
          fps_d);
      fps_n = 0;
      fps_d = 1;
    }
  }

  GST_LOG_OBJECT (ffmpegdec, "setting framerate: %d/%d", fps_n, fps_d);
  out_info->fps_n = fps_n;
  out_info->fps_d = fps_d;

  /* calculate and update par now */
  gst_ffmpegviddec_update_par (ffmpegdec, in_info, out_info);

  GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) = ffmpegdec->cur_multiview_mode;
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (out_info) = ffmpegdec->cur_multiview_flags;

  if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (ffmpegdec)))
    goto negotiate_failed;

  /* The decoder is configured, we now know the true latency */
  if (fps_n) {
    /* worst case: has_b_frames frames of reordering delay */
    latency =
        gst_util_uint64_scale_ceil (ffmpegdec->context->has_b_frames *
        GST_SECOND, fps_d, fps_n);
    gst_video_decoder_set_latency (GST_VIDEO_DECODER (ffmpegdec), latency,
        latency);
  }

  return TRUE;

  /* ERRORS */
unknown_format:
  {
    GST_ERROR_OBJECT (ffmpegdec,
        "decoder requires a video format unsupported by GStreamer");
    return FALSE;
  }
negotiate_failed:
  {
    /* Reset so we try again next time even if force==FALSE */
    ffmpegdec->pic_pix_fmt = 0;
    ffmpegdec->pic_width = 0;
    ffmpegdec->pic_height = 0;
    ffmpegdec->pic_par_n = 0;
    ffmpegdec->pic_par_d = 0;
    ffmpegdec->pic_interlaced = 0;
    ffmpegdec->pic_field_order = 0;
    ffmpegdec->pic_field_order_changed = FALSE;
    ffmpegdec->ctx_ticks = 0;
    ffmpegdec->ctx_time_n = 0;
    ffmpegdec->ctx_time_d = 0;

    GST_ERROR_OBJECT (ffmpegdec, "negotiation failed");
    return FALSE;
  }
}
1323 
1324 /* perform qos calculations before decoding the next frame.
1325  *
1326  * Sets the skip_frame flag and if things are really bad, skips to the next
1327  * keyframe.
1328  *
1329  */
1330 static void
gst_ffmpegviddec_do_qos(GstFFMpegVidDec * ffmpegdec,GstVideoCodecFrame * frame,gboolean * mode_switch)1331 gst_ffmpegviddec_do_qos (GstFFMpegVidDec * ffmpegdec,
1332     GstVideoCodecFrame * frame, gboolean * mode_switch)
1333 {
1334   GstClockTimeDiff diff;
1335   GstSegmentFlags skip_flags =
1336       GST_VIDEO_DECODER_INPUT_SEGMENT (ffmpegdec).flags;
1337 
1338   *mode_switch = FALSE;
1339 
1340   if (frame == NULL)
1341     return;
1342 
1343   if (skip_flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS) {
1344     ffmpegdec->context->skip_frame = AVDISCARD_NONKEY;
1345     *mode_switch = TRUE;
1346   } else if (skip_flags & GST_SEGMENT_FLAG_TRICKMODE) {
1347     ffmpegdec->context->skip_frame = AVDISCARD_NONREF;
1348     *mode_switch = TRUE;
1349   }
1350 
1351   if (*mode_switch == TRUE) {
1352     /* We've already switched mode, we can return straight away
1353      * without any further calculation */
1354     return;
1355   }
1356 
1357   diff =
1358       gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (ffmpegdec),
1359       frame);
1360 
1361   /* if we don't have timing info, then we don't do QoS */
1362   if (G_UNLIKELY (diff == G_MAXINT64)) {
1363     /* Ensure the skipping strategy is the default one */
1364     ffmpegdec->context->skip_frame = ffmpegdec->skip_frame;
1365     return;
1366   }
1367 
1368   GST_DEBUG_OBJECT (ffmpegdec, "decoding time %" G_GINT64_FORMAT, diff);
1369 
1370   if (diff > 0 && ffmpegdec->context->skip_frame != AVDISCARD_DEFAULT) {
1371     ffmpegdec->context->skip_frame = AVDISCARD_DEFAULT;
1372     *mode_switch = TRUE;
1373     GST_DEBUG_OBJECT (ffmpegdec, "QOS: normal mode");
1374   }
1375 
1376   else if (diff <= 0 && ffmpegdec->context->skip_frame != AVDISCARD_NONREF) {
1377     ffmpegdec->context->skip_frame = AVDISCARD_NONREF;
1378     *mode_switch = TRUE;
1379     GST_DEBUG_OBJECT (ffmpegdec,
1380         "QOS: hurry up, diff %" G_GINT64_FORMAT " >= 0", diff);
1381   }
1382 }
1383 
1384 /* get an outbuf buffer with the current picture */
1385 static GstFlowReturn
get_output_buffer(GstFFMpegVidDec * ffmpegdec,GstVideoCodecFrame * frame)1386 get_output_buffer (GstFFMpegVidDec * ffmpegdec, GstVideoCodecFrame * frame)
1387 {
1388   GstFlowReturn ret = GST_FLOW_OK;
1389   AVFrame pic, *outpic;
1390   GstVideoFrame vframe;
1391   GstVideoInfo *info;
1392   guint c;
1393 
1394   GST_LOG_OBJECT (ffmpegdec, "get output buffer");
1395 
1396   if (!ffmpegdec->output_state)
1397     goto not_negotiated;
1398 
1399   ret =
1400       gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (ffmpegdec),
1401       frame);
1402   if (G_UNLIKELY (ret != GST_FLOW_OK))
1403     goto alloc_failed;
1404 
1405   /* original ffmpeg code does not handle odd sizes correctly.
1406    * This patched up version does */
1407   /* Fill avpicture */
1408   info = &ffmpegdec->output_state->info;
1409   if (!gst_video_frame_map (&vframe, info, frame->output_buffer,
1410           GST_MAP_READ | GST_MAP_WRITE))
1411     goto map_failed;
1412 
1413   memset (&pic, 0, sizeof (pic));
1414   pic.format = ffmpegdec->pic_pix_fmt;
1415   pic.width = GST_VIDEO_FRAME_WIDTH (&vframe);
1416   pic.height = GST_VIDEO_FRAME_HEIGHT (&vframe);
1417   for (c = 0; c < AV_NUM_DATA_POINTERS; c++) {
1418     if (c < GST_VIDEO_INFO_N_PLANES (info)) {
1419       pic.data[c] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, c);
1420       pic.linesize[c] = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, c);
1421       GST_LOG_OBJECT (ffmpegdec, "[%i] linesize %d, data %p", c,
1422           pic.linesize[c], pic.data[c]);
1423     } else {
1424       pic.data[c] = NULL;
1425       pic.linesize[c] = 0;
1426     }
1427   }
1428 
1429   outpic = ffmpegdec->picture;
1430 
1431   if (av_frame_copy (&pic, outpic) != 0) {
1432     GST_ERROR_OBJECT (ffmpegdec, "Failed to copy output frame");
1433     ret = GST_FLOW_ERROR;
1434   }
1435 
1436   gst_video_frame_unmap (&vframe);
1437 
1438   ffmpegdec->picture->reordered_opaque = -1;
1439 
1440   return ret;
1441 
1442   /* special cases */
1443 alloc_failed:
1444   {
1445     GST_ELEMENT_ERROR (ffmpegdec, RESOURCE, FAILED,
1446         ("Unable to allocate memory"),
1447         ("The downstream pool failed to allocated buffer."));
1448     return ret;
1449   }
1450 map_failed:
1451   {
1452     GST_ELEMENT_ERROR (ffmpegdec, RESOURCE, OPEN_READ_WRITE,
1453         ("Cannot access memory for read and write operation."),
1454         ("The video memory allocated from downstream pool could not mapped for"
1455             "read and write."));
1456     return ret;
1457   }
1458 not_negotiated:
1459   {
1460     GST_DEBUG_OBJECT (ffmpegdec, "not negotiated");
1461     return GST_FLOW_NOT_NEGOTIATED;
1462   }
1463 }
1464 
1465 static void
gst_avpacket_init(AVPacket * packet,guint8 * data,guint size)1466 gst_avpacket_init (AVPacket * packet, guint8 * data, guint size)
1467 {
1468   memset (packet, 0, sizeof (AVPacket));
1469   packet->data = data;
1470   packet->size = size;
1471 }
1472 
/*
 * Pull one decoded frame out of the libav context and push it downstream.
 *
 * @frame is the codec frame currently being decoded (used for QoS and for
 * ghost-frame cleanup); the frame actually output may be an earlier one
 * because of decoder reordering (it is recovered from picture->opaque).
 * *ret receives the flow return to propagate.
 *
 * Returns: whether a frame was decoded
 */
static gboolean
gst_ffmpegviddec_video_frame (GstFFMpegVidDec * ffmpegdec,
    GstVideoCodecFrame * frame, GstFlowReturn * ret)
{
  gint res;
  gboolean got_frame = FALSE;
  gboolean mode_switch;
  GstVideoCodecFrame *out_frame;
  GstFFMpegVidDecVideoFrame *out_dframe;
  GstBufferPool *pool;

  *ret = GST_FLOW_OK;

  /* in case we skip frames */
  ffmpegdec->picture->pict_type = -1;

  /* run QoS code, we don't stop decoding the frame when we are late because
   * else we might skip a reference frame */
  gst_ffmpegviddec_do_qos (ffmpegdec, frame, &mode_switch);

  res = avcodec_receive_frame (ffmpegdec->context, ffmpegdec->picture);

  /* No frames available at this time */
  if (res == AVERROR (EAGAIN))
    goto beach;
  else if (res == AVERROR_EOF) {
    /* the context has been fully drained (after avcodec flush) */
    *ret = GST_FLOW_EOS;
    GST_DEBUG_OBJECT (ffmpegdec, "Context was entirely flushed");
    goto beach;
  } else if (res < 0) {
    /* decoding errors are not fatal for the stream; keep going */
    *ret = GST_FLOW_OK;
    GST_WARNING_OBJECT (ffmpegdec, "Legitimate decoding error");
    goto beach;
  }

  got_frame = TRUE;

  /* get the output picture timing info again */
  /* picture->opaque carries the GstFFMpegVidDecVideoFrame attached when the
   * buffer was requested; it identifies which codec frame this output is */
  out_dframe = ffmpegdec->picture->opaque;
  out_frame = gst_video_codec_frame_ref (out_dframe->frame);

  /* also give back a buffer allocated by the frame, if any */
  gst_buffer_replace (&out_frame->output_buffer, out_dframe->buffer);
  gst_buffer_replace (&out_dframe->buffer, NULL);

  /* Extract auxilliary info not stored in the main AVframe */
  {
    GstVideoInfo *in_info = &ffmpegdec->input_state->info;
    /* Take multiview mode from upstream if present */
    ffmpegdec->picture_multiview_mode = GST_VIDEO_INFO_MULTIVIEW_MODE (in_info);
    ffmpegdec->picture_multiview_flags =
        GST_VIDEO_INFO_MULTIVIEW_FLAGS (in_info);

    /* Otherwise, see if there's info in the frame */
    if (ffmpegdec->picture_multiview_mode == GST_VIDEO_MULTIVIEW_MODE_NONE) {
      AVFrameSideData *side_data =
          av_frame_get_side_data (ffmpegdec->picture, AV_FRAME_DATA_STEREO3D);
      if (side_data) {
        AVStereo3D *stereo = (AVStereo3D *) side_data->data;
        ffmpegdec->picture_multiview_mode = stereo_av_to_gst (stereo->type);
        if (stereo->flags & AV_STEREO3D_FLAG_INVERT) {
          ffmpegdec->picture_multiview_flags =
              GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
        } else {
          ffmpegdec->picture_multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
        }
      }
    }
  }

  GST_DEBUG_OBJECT (ffmpegdec,
      "pts %" G_GUINT64_FORMAT " duration %" G_GUINT64_FORMAT,
      out_frame->pts, out_frame->duration);
  GST_DEBUG_OBJECT (ffmpegdec, "picture: pts %" G_GUINT64_FORMAT,
      (guint64) ffmpegdec->picture->pts);
  GST_DEBUG_OBJECT (ffmpegdec, "picture: num %d",
      ffmpegdec->picture->coded_picture_number);
  GST_DEBUG_OBJECT (ffmpegdec, "picture: display %d",
      ffmpegdec->picture->display_picture_number);
  GST_DEBUG_OBJECT (ffmpegdec, "picture: opaque %p",
      ffmpegdec->picture->opaque);
  GST_DEBUG_OBJECT (ffmpegdec, "picture: reordered opaque %" G_GUINT64_FORMAT,
      (guint64) ffmpegdec->picture->reordered_opaque);
  GST_DEBUG_OBJECT (ffmpegdec, "repeat_pict:%d",
      ffmpegdec->picture->repeat_pict);
  GST_DEBUG_OBJECT (ffmpegdec, "corrupted frame: %d",
      ! !(ffmpegdec->picture->flags & AV_FRAME_FLAG_CORRUPT));

  /* renegotiate output caps if anything about the picture changed */
  if (!gst_ffmpegviddec_negotiate (ffmpegdec, ffmpegdec->context,
          ffmpegdec->picture))
    goto negotiation_error;

  /* Make sure the output buffer comes from the current decoder pool; if
   * not (or if there is none), allocate a fresh one and copy the picture */
  pool = gst_video_decoder_get_buffer_pool (GST_VIDEO_DECODER (ffmpegdec));
  if (G_UNLIKELY (out_frame->output_buffer == NULL)) {
    *ret = get_output_buffer (ffmpegdec, out_frame);
  } else if (G_UNLIKELY (out_frame->output_buffer->pool != pool)) {
    GstBuffer *tmp = out_frame->output_buffer;
    out_frame->output_buffer = NULL;
    *ret = get_output_buffer (ffmpegdec, out_frame);
    gst_buffer_unref (tmp);
  }
#ifndef G_DISABLE_ASSERT
  else {
    /* sanity-check that the buffer's video meta matches the negotiated
     * output dimensions */
    GstVideoMeta *vmeta = gst_buffer_get_video_meta (out_frame->output_buffer);
    if (vmeta) {
      GstVideoInfo *info = &ffmpegdec->output_state->info;
      g_assert ((gint) vmeta->width == GST_VIDEO_INFO_WIDTH (info));
      g_assert ((gint) vmeta->height == GST_VIDEO_INFO_HEIGHT (info));
    }
  }
#endif
  gst_object_unref (pool);

  if (G_UNLIKELY (*ret != GST_FLOW_OK))
    goto no_output;

  /* Mark corrupted frames as corrupted */
  if (ffmpegdec->picture->flags & AV_FRAME_FLAG_CORRUPT)
    GST_BUFFER_FLAG_SET (out_frame->output_buffer, GST_BUFFER_FLAG_CORRUPTED);

  if (ffmpegdec->pic_interlaced) {
    /* set interlaced flags */
    if (ffmpegdec->picture->repeat_pict)
      GST_BUFFER_FLAG_SET (out_frame->output_buffer, GST_VIDEO_BUFFER_FLAG_RFF);
    if (ffmpegdec->picture->top_field_first)
      GST_BUFFER_FLAG_SET (out_frame->output_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    if (ffmpegdec->picture->interlaced_frame)
      GST_BUFFER_FLAG_SET (out_frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_INTERLACED);
  }

  /* Propagate A53 closed captions carried as frame side data, unless the
   * input buffer already had an equivalent CEA-708 caption meta */
  {
    AVFrameSideData *side_data =
        av_frame_get_side_data (ffmpegdec->picture, AV_FRAME_DATA_A53_CC);
    if (side_data) {
      GstVideoCaptionMeta *cc_meta = NULL;
      gpointer iter = NULL;
      gboolean found_708_raw_meta = FALSE;

      GST_LOG_OBJECT (ffmpegdec,
          "Found CC side data of type AV_FRAME_DATA_A53_CC, size %d",
          side_data->size);
      GST_MEMDUMP ("A53 CC", side_data->data, side_data->size);

      while ((cc_meta = (GstVideoCaptionMeta *)
              gst_buffer_iterate_meta_filtered (out_frame->input_buffer, &iter,
                  GST_VIDEO_CAPTION_META_API_TYPE))) {
        if (cc_meta->caption_type != GST_VIDEO_CAPTION_TYPE_CEA708_RAW)
          continue;
        found_708_raw_meta = TRUE;
        break;
      }

      /* do not add CEA 708 caption meta if it already exists */
      if (!found_708_raw_meta) {
        out_frame->output_buffer =
            gst_buffer_make_writable (out_frame->output_buffer);
        gst_buffer_add_video_caption_meta (out_frame->output_buffer,
            GST_VIDEO_CAPTION_TYPE_CEA708_RAW, side_data->data,
            side_data->size);
      } else {
        GST_LOG_OBJECT (ffmpegdec,
            "CEA 708 caption meta already exists: will not add new caption meta");
      }
    }
  }

  /* cleaning time */
  /* so we decoded this frame, frames preceding it in decoding order
   * that still do not have a buffer allocated seem rather useless,
   * and can be discarded, due to e.g. misparsed bogus frame
   * or non-keyframe in skipped decoding, ...
   * In any case, not likely to be seen again, so discard those,
   * before they pile up and/or mess with timestamping */
  {
    GList *l, *ol;
    GstVideoDecoder *dec = GST_VIDEO_DECODER (ffmpegdec);
    gboolean old = TRUE;

    /* frames before @frame in the decoder's list are "old" */
    ol = l = gst_video_decoder_get_frames (dec);
    while (l) {
      GstVideoCodecFrame *tmp = l->data;

      if (tmp == frame)
        old = FALSE;

      if (old && GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (tmp)) {
        GST_LOG_OBJECT (dec,
            "discarding ghost frame %p (#%d) PTS:%" GST_TIME_FORMAT " DTS:%"
            GST_TIME_FORMAT, tmp, tmp->system_frame_number,
            GST_TIME_ARGS (tmp->pts), GST_TIME_ARGS (tmp->dts));
        /* drop extra ref and remove from frame list */
        gst_video_decoder_release_frame (dec, tmp);
      } else {
        /* drop extra ref we got */
        gst_video_codec_frame_unref (tmp);
      }
      l = l->next;
    }
    g_list_free (ol);
  }

  av_frame_unref (ffmpegdec->picture);

  /* FIXME: Ideally we would remap the buffer read-only now before pushing but
   * libav might still have a reference to it!
   */
  *ret =
      gst_video_decoder_finish_frame (GST_VIDEO_DECODER (ffmpegdec), out_frame);

beach:
  GST_DEBUG_OBJECT (ffmpegdec, "return flow %s, got frame: %d",
      gst_flow_get_name (*ret), got_frame);
  return got_frame;

  /* special cases */
no_output:
  {
    GST_DEBUG_OBJECT (ffmpegdec, "no output buffer");
    gst_video_decoder_drop_frame (GST_VIDEO_DECODER (ffmpegdec), out_frame);
    goto beach;
  }

negotiation_error:
  {
    /* a flush during negotiation is not an error */
    if (GST_PAD_IS_FLUSHING (GST_VIDEO_DECODER_SRC_PAD (ffmpegdec))) {
      *ret = GST_FLOW_FLUSHING;
      goto beach;
    }
    GST_WARNING_OBJECT (ffmpegdec, "Error negotiating format");
    *ret = GST_FLOW_NOT_NEGOTIATED;
    goto beach;
  }
}
1710 
1711 
1712  /* Returns: Whether a frame was decoded */
1713 static gboolean
gst_ffmpegviddec_frame(GstFFMpegVidDec * ffmpegdec,GstVideoCodecFrame * frame,GstFlowReturn * ret)1714 gst_ffmpegviddec_frame (GstFFMpegVidDec * ffmpegdec, GstVideoCodecFrame * frame,
1715     GstFlowReturn * ret)
1716 {
1717   gboolean got_frame = FALSE;
1718 
1719   if (G_UNLIKELY (ffmpegdec->context->codec == NULL))
1720     goto no_codec;
1721 
1722   *ret = GST_FLOW_OK;
1723   ffmpegdec->context->frame_number++;
1724 
1725   got_frame = gst_ffmpegviddec_video_frame (ffmpegdec, frame, ret);
1726 
1727   return got_frame;
1728 
1729   /* ERRORS */
1730 no_codec:
1731   {
1732     GST_ERROR_OBJECT (ffmpegdec, "no codec context");
1733     *ret = GST_FLOW_NOT_NEGOTIATED;
1734     return -1;
1735   }
1736 }
1737 
1738 static GstFlowReturn
gst_ffmpegviddec_drain(GstVideoDecoder * decoder)1739 gst_ffmpegviddec_drain (GstVideoDecoder * decoder)
1740 {
1741   GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) decoder;
1742   GstFFMpegVidDecClass *oclass;
1743 
1744   if (!ffmpegdec->opened)
1745     return GST_FLOW_OK;
1746 
1747   oclass = (GstFFMpegVidDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
1748 
1749   if (oclass->in_plugin->capabilities & AV_CODEC_CAP_DELAY) {
1750     GstFlowReturn ret;
1751     gboolean got_frame = FALSE;
1752 
1753     GST_LOG_OBJECT (ffmpegdec,
1754         "codec has delay capabilities, calling until ffmpeg has drained everything");
1755 
1756     if (avcodec_send_packet (ffmpegdec->context, NULL))
1757       goto send_packet_failed;
1758 
1759     do {
1760       got_frame = gst_ffmpegviddec_frame (ffmpegdec, NULL, &ret);
1761     } while (got_frame && ret == GST_FLOW_OK);
1762     avcodec_flush_buffers (ffmpegdec->context);
1763   }
1764 
1765 done:
1766   return GST_FLOW_OK;
1767 
1768 send_packet_failed:
1769   GST_WARNING_OBJECT (ffmpegdec, "send packet failed, could not drain decoder");
1770   goto done;
1771 }
1772 
/* GstVideoDecoder::handle_frame implementation.
 *
 * Maps the input buffer, makes sure the data has the trailing zero padding
 * libav requires (copying into a private scratch buffer when it does not),
 * wraps it in an AVPacket, sends it to the decoder and then pulls out as
 * many decoded frames as libav will give us. */
static GstFlowReturn
gst_ffmpegviddec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) decoder;
  guint8 *data;
  gint size;
  gboolean got_frame;
  GstMapInfo minfo;
  GstFlowReturn ret = GST_FLOW_OK;
  AVPacket packet;

  GST_LOG_OBJECT (ffmpegdec,
      "Received new data of size %" G_GSIZE_FORMAT ", dts %" GST_TIME_FORMAT
      ", pts:%" GST_TIME_FORMAT ", dur:%" GST_TIME_FORMAT,
      gst_buffer_get_size (frame->input_buffer), GST_TIME_ARGS (frame->dts),
      GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->duration));

  if (!gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (ffmpegdec, STREAM, DECODE, ("Decoding problem"),
        ("Failed to map buffer for reading"));
    return GST_FLOW_ERROR;
  }

  /* treat frame as void until a buffer is requested for it */
  GST_VIDEO_CODEC_FRAME_FLAG_SET (frame,
      GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY);

  data = minfo.data;
  size = minfo.size;

  /* libav wants AV_INPUT_BUFFER_PADDING_SIZE zeroed bytes after the data;
   * if the mapped memory doesn't already provide that, copy into our own
   * (grow-only) padded scratch buffer */
  if (size > 0 && (!GST_MEMORY_IS_ZERO_PADDED (minfo.memory)
          || (minfo.maxsize - minfo.size) < AV_INPUT_BUFFER_PADDING_SIZE)) {
    /* add padding */
    if (ffmpegdec->padded_size < size + AV_INPUT_BUFFER_PADDING_SIZE) {
      ffmpegdec->padded_size = size + AV_INPUT_BUFFER_PADDING_SIZE;
      ffmpegdec->padded = g_realloc (ffmpegdec->padded, ffmpegdec->padded_size);
      GST_LOG_OBJECT (ffmpegdec, "resized padding buffer to %d",
          ffmpegdec->padded_size);
    }
    GST_CAT_TRACE_OBJECT (GST_CAT_PERFORMANCE, ffmpegdec,
        "Copy input to add padding");
    memcpy (ffmpegdec->padded, data, size);
    memset (ffmpegdec->padded + size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    data = ffmpegdec->padded;
  }

  /* now decode the frame */
  gst_avpacket_init (&packet, data, size);

  /* attach the stored palette (paletted formats) as packet side data */
  if (ffmpegdec->palette) {
    guint8 *pal;

    pal = av_packet_new_side_data (&packet, AV_PKT_DATA_PALETTE,
        AVPALETTE_SIZE);
    gst_buffer_extract (ffmpegdec->palette, 0, pal, AVPALETTE_SIZE);
    GST_DEBUG_OBJECT (ffmpegdec, "copy pal %p %p", &packet, pal);
  }

  if (!packet.size)
    goto done;

  /* save reference to the timing info: the frame number travels through
   * libav's reordering via reordered_opaque so the output can be matched
   * back to this GstVideoCodecFrame */
  ffmpegdec->context->reordered_opaque = (gint64) frame->system_frame_number;
  ffmpegdec->picture->reordered_opaque = (gint64) frame->system_frame_number;

  GST_DEBUG_OBJECT (ffmpegdec, "stored opaque values idx %d",
      frame->system_frame_number);

  /* This might call into get_buffer() from another thread,
   * which would cause a deadlock. Release the lock here
   * and taking it again later seems safe
   * See https://bugzilla.gnome.org/show_bug.cgi?id=726020
   */
  GST_VIDEO_DECODER_STREAM_UNLOCK (ffmpegdec);
  if (avcodec_send_packet (ffmpegdec->context, &packet) < 0) {
    GST_VIDEO_DECODER_STREAM_LOCK (ffmpegdec);
    goto send_packet_failed;
  }
  GST_VIDEO_DECODER_STREAM_LOCK (ffmpegdec);

  /* pull out every frame the decoder has ready after this packet */
  do {
    /* decode a frame of audio/video now */
    got_frame = gst_ffmpegviddec_frame (ffmpegdec, frame, &ret);

    if (ret != GST_FLOW_OK) {
      GST_LOG_OBJECT (ffmpegdec, "breaking because of flow ret %s",
          gst_flow_get_name (ret));
      break;
    }
  } while (got_frame);

done:
  gst_buffer_unmap (frame->input_buffer, &minfo);
  gst_video_codec_frame_unref (frame);

  return ret;

send_packet_failed:
  {
    /* best-effort: log and return the current (OK) flow so upstream
     * keeps feeding us data */
    GST_WARNING_OBJECT (ffmpegdec, "Failed to send data for decoding");
    goto done;
  }
}
1878 
1879 static gboolean
gst_ffmpegviddec_start(GstVideoDecoder * decoder)1880 gst_ffmpegviddec_start (GstVideoDecoder * decoder)
1881 {
1882   GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) decoder;
1883   GstFFMpegVidDecClass *oclass;
1884 
1885   oclass = (GstFFMpegVidDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
1886 
1887   GST_OBJECT_LOCK (ffmpegdec);
1888   gst_ffmpeg_avcodec_close (ffmpegdec->context);
1889   if (avcodec_get_context_defaults3 (ffmpegdec->context, oclass->in_plugin) < 0) {
1890     GST_DEBUG_OBJECT (ffmpegdec, "Failed to set context defaults");
1891     GST_OBJECT_UNLOCK (ffmpegdec);
1892     return FALSE;
1893   }
1894   ffmpegdec->context->opaque = ffmpegdec;
1895   GST_OBJECT_UNLOCK (ffmpegdec);
1896 
1897   return TRUE;
1898 }
1899 
1900 static gboolean
gst_ffmpegviddec_stop(GstVideoDecoder * decoder)1901 gst_ffmpegviddec_stop (GstVideoDecoder * decoder)
1902 {
1903   GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) decoder;
1904 
1905   GST_OBJECT_LOCK (ffmpegdec);
1906   gst_ffmpegviddec_close (ffmpegdec, FALSE);
1907   GST_OBJECT_UNLOCK (ffmpegdec);
1908   g_free (ffmpegdec->padded);
1909   ffmpegdec->padded = NULL;
1910   ffmpegdec->padded_size = 0;
1911   if (ffmpegdec->input_state)
1912     gst_video_codec_state_unref (ffmpegdec->input_state);
1913   ffmpegdec->input_state = NULL;
1914   if (ffmpegdec->output_state)
1915     gst_video_codec_state_unref (ffmpegdec->output_state);
1916   ffmpegdec->output_state = NULL;
1917 
1918   if (ffmpegdec->internal_pool)
1919     gst_object_unref (ffmpegdec->internal_pool);
1920   ffmpegdec->internal_pool = NULL;
1921 
1922   ffmpegdec->pic_pix_fmt = 0;
1923   ffmpegdec->pic_width = 0;
1924   ffmpegdec->pic_height = 0;
1925   ffmpegdec->pic_par_n = 0;
1926   ffmpegdec->pic_par_d = 0;
1927   ffmpegdec->pic_interlaced = 0;
1928   ffmpegdec->pic_field_order = 0;
1929   ffmpegdec->pic_field_order_changed = FALSE;
1930   ffmpegdec->ctx_ticks = 0;
1931   ffmpegdec->ctx_time_n = 0;
1932   ffmpegdec->ctx_time_d = 0;
1933 
1934   ffmpegdec->pool_width = 0;
1935   ffmpegdec->pool_height = 0;
1936   ffmpegdec->pool_format = 0;
1937 
1938   return TRUE;
1939 }
1940 
1941 static GstFlowReturn
gst_ffmpegviddec_finish(GstVideoDecoder * decoder)1942 gst_ffmpegviddec_finish (GstVideoDecoder * decoder)
1943 {
1944   gst_ffmpegviddec_drain (decoder);
1945   /* note that finish can and should clean up more drastically,
1946    * but drain is also invoked on e.g. packet loss in GAP handling */
1947   gst_ffmpegviddec_flush (decoder);
1948 
1949   return GST_FLOW_OK;
1950 }
1951 
1952 static gboolean
gst_ffmpegviddec_flush(GstVideoDecoder * decoder)1953 gst_ffmpegviddec_flush (GstVideoDecoder * decoder)
1954 {
1955   GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) decoder;
1956 
1957   if (ffmpegdec->opened) {
1958     GST_LOG_OBJECT (decoder, "flushing buffers");
1959     avcodec_flush_buffers (ffmpegdec->context);
1960   }
1961 
1962   return TRUE;
1963 }
1964 
/* GstVideoDecoder::decide_allocation implementation.
 *
 * Negotiates the downstream buffer pool and, when possible, configures it
 * so that libav can decode directly into downstream buffers (direct
 * rendering). Falls back to a plain video buffer pool when the negotiated
 * pool is too constrained or refuses our configuration.
 *
 * Ownership note: gst_buffer_pool_set_config() consumes the config
 * structure, so each GstStructure below is either passed to it or freed
 * explicitly — never both. */
static gboolean
gst_ffmpegviddec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) decoder;
  GstVideoCodecState *state;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;
  gboolean have_pool, have_videometa, have_alignment, update_pool = FALSE;
  GstAllocator *allocator = NULL;
  GstAllocationParams params = DEFAULT_ALLOC_PARAM;

  /* remember whether downstream proposed a pool before the base class
   * potentially adds a default one */
  have_pool = (gst_query_get_n_allocation_pools (query) != 0);

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  state = gst_video_decoder_get_output_state (decoder);

  if (gst_query_get_n_allocation_params (query) > 0) {
    /* adopt downstream's allocator but enforce our stride alignment */
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    params.align = MAX (params.align, DEFAULT_STRIDE_ALIGN);
  } else {
    gst_query_add_allocation_param (query, allocator, &params);
  }

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  /* Don't use pool that can't grow, as we don't know how many buffer we'll
   * need, otherwise we may stall */
  if (max != 0 && max < REQUIRED_POOL_MAX_BUFFERS) {
    gst_object_unref (pool);
    pool = gst_video_buffer_pool_new ();
    max = 0;
    update_pool = TRUE;
    have_pool = FALSE;

    /* if there is an allocator, also drop it, as it might be the reason we
     * have this limit. Default will be used */
    if (allocator) {
      gst_object_unref (allocator);
      allocator = NULL;
    }
  }

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, state->caps, size, min, max);
  gst_buffer_pool_config_set_allocator (config, allocator, &params);

  have_videometa =
      gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  if (have_videometa)
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);

  have_alignment =
      gst_buffer_pool_has_option (pool, GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);

  /* If we have videometa, we never have to copy */
  if (have_videometa && have_pool && have_alignment &&
      gst_ffmpegviddec_can_direct_render (ffmpegdec)) {
    /* work on a copy so the unmodified config is still available for the
     * non-direct-rendering path below */
    GstStructure *config_copy = gst_structure_copy (config);

    gst_ffmpegvideodec_prepare_dr_pool (ffmpegdec, pool, &state->info,
        config_copy);

    /* FIXME validate and retry */
    if (gst_buffer_pool_set_config (pool, config_copy)) {
      GstFlowReturn ret;
      GstBuffer *tmp;

      /* acquire one trial buffer to verify the pool's actual strides match
       * what libav will use — only then can we decode straight into it */
      gst_buffer_pool_set_active (pool, TRUE);
      ret = gst_buffer_pool_acquire_buffer (pool, &tmp, NULL);
      if (ret == GST_FLOW_OK) {
        GstVideoMeta *vmeta = gst_buffer_get_video_meta (tmp);
        gboolean same_stride = TRUE;
        guint i;

        for (i = 0; i < vmeta->n_planes; i++) {
          if (vmeta->stride[i] != ffmpegdec->stride[i]) {
            same_stride = FALSE;
            break;
          }
        }

        gst_buffer_unref (tmp);

        if (same_stride) {
          if (ffmpegdec->internal_pool)
            gst_object_unref (ffmpegdec->internal_pool);
          ffmpegdec->internal_pool = gst_object_ref (pool);
          ffmpegdec->pool_info = state->info;
          gst_structure_free (config);
          goto done;
        }
      }
    }
  }

  /* reuse our internal pool if the caps dimensions still match it */
  if (have_videometa && ffmpegdec->internal_pool
      && ffmpegdec->pool_width == state->info.width
      && ffmpegdec->pool_height == state->info.height) {
    update_pool = TRUE;
    gst_object_unref (pool);
    pool = gst_object_ref (ffmpegdec->internal_pool);
    gst_structure_free (config);
    goto done;
  }

  /* configure */
  if (!gst_buffer_pool_set_config (pool, config)) {
    gboolean working_pool = FALSE;
    /* the pool rejected our config; retry with whatever it suggests, as
     * long as the suggested params are still acceptable */
    config = gst_buffer_pool_get_config (pool);

    if (gst_buffer_pool_config_validate_params (config, state->caps, size, min,
            max)) {
      working_pool = gst_buffer_pool_set_config (pool, config);
    } else {
      gst_structure_free (config);
    }

    /* last resort: a fresh default video buffer pool */
    if (!working_pool) {
      gst_object_unref (pool);
      pool = gst_video_buffer_pool_new ();
      config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, state->caps, size, min, max);
      gst_buffer_pool_config_set_allocator (config, NULL, &params);
      gst_buffer_pool_set_config (pool, config);
      update_pool = TRUE;
    }
  }

done:
  /* and store */
  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_object_unref (pool);
  if (allocator)
    gst_object_unref (allocator);
  gst_video_codec_state_unref (state);

  return TRUE;
}
2111 
2112 static gboolean
gst_ffmpegviddec_propose_allocation(GstVideoDecoder * decoder,GstQuery * query)2113 gst_ffmpegviddec_propose_allocation (GstVideoDecoder * decoder,
2114     GstQuery * query)
2115 {
2116   GstAllocationParams params;
2117 
2118   gst_allocation_params_init (&params);
2119   params.flags = GST_MEMORY_FLAG_ZERO_PADDED;
2120   params.align = DEFAULT_STRIDE_ALIGN;
2121   params.padding = AV_INPUT_BUFFER_PADDING_SIZE;
2122   /* we would like to have some padding so that we don't have to
2123    * memcpy. We don't suggest an allocator. */
2124   gst_query_add_allocation_param (query, NULL, &params);
2125 
2126   return GST_VIDEO_DECODER_CLASS (parent_class)->propose_allocation (decoder,
2127       query);
2128 }
2129 
2130 static void
gst_ffmpegviddec_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)2131 gst_ffmpegviddec_set_property (GObject * object,
2132     guint prop_id, const GValue * value, GParamSpec * pspec)
2133 {
2134   GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) object;
2135 
2136   switch (prop_id) {
2137     case PROP_LOWRES:
2138       ffmpegdec->lowres = ffmpegdec->context->lowres = g_value_get_enum (value);
2139       break;
2140     case PROP_SKIPFRAME:
2141       ffmpegdec->skip_frame = ffmpegdec->context->skip_frame =
2142           g_value_get_enum (value);
2143       break;
2144     case PROP_DIRECT_RENDERING:
2145       ffmpegdec->direct_rendering = g_value_get_boolean (value);
2146       break;
2147     case PROP_DEBUG_MV:
2148       ffmpegdec->debug_mv = ffmpegdec->context->debug_mv =
2149           g_value_get_boolean (value);
2150       break;
2151     case PROP_MAX_THREADS:
2152       ffmpegdec->max_threads = g_value_get_int (value);
2153       break;
2154     case PROP_OUTPUT_CORRUPT:
2155       ffmpegdec->output_corrupt = g_value_get_boolean (value);
2156       break;
2157     default:
2158       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
2159       break;
2160   }
2161 }
2162 
2163 static void
gst_ffmpegviddec_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)2164 gst_ffmpegviddec_get_property (GObject * object,
2165     guint prop_id, GValue * value, GParamSpec * pspec)
2166 {
2167   GstFFMpegVidDec *ffmpegdec = (GstFFMpegVidDec *) object;
2168 
2169   switch (prop_id) {
2170     case PROP_LOWRES:
2171       g_value_set_enum (value, ffmpegdec->context->lowres);
2172       break;
2173     case PROP_SKIPFRAME:
2174       g_value_set_enum (value, ffmpegdec->context->skip_frame);
2175       break;
2176     case PROP_DIRECT_RENDERING:
2177       g_value_set_boolean (value, ffmpegdec->direct_rendering);
2178       break;
2179     case PROP_DEBUG_MV:
2180       g_value_set_boolean (value, ffmpegdec->context->debug_mv);
2181       break;
2182     case PROP_MAX_THREADS:
2183       g_value_set_int (value, ffmpegdec->max_threads);
2184       break;
2185     case PROP_OUTPUT_CORRUPT:
2186       g_value_set_boolean (value, ffmpegdec->output_corrupt);
2187       break;
2188     default:
2189       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
2190       break;
2191   }
2192 }
2193 
/* Register one GStreamer element (avdec_<name>) per usable libav video
 * decoder. Skips raw/quasi codecs, hardware wrappers and codecs for which
 * GStreamer has better native plugins, and assigns a rank reflecting how
 * much we prefer each decoder.
 *
 * Returns: TRUE on success, FALSE if any element registration failed. */
gboolean
gst_ffmpegviddec_register (GstPlugin * plugin)
{
  GTypeInfo typeinfo = {
    sizeof (GstFFMpegVidDecClass),
    (GBaseInitFunc) gst_ffmpegviddec_base_init,
    NULL,
    (GClassInitFunc) gst_ffmpegviddec_class_init,
    NULL,
    NULL,
    sizeof (GstFFMpegVidDec),
    0,
    (GInstanceInitFunc) gst_ffmpegviddec_init,
  };
  GType type;
  AVCodec *in_plugin;
  gint rank;
  /* opaque iteration state for av_codec_iterate() */
  void *i = 0;

  GST_LOG ("Registering decoders");

  while ((in_plugin = (AVCodec *) av_codec_iterate (&i))) {
    gchar *type_name;
    gchar *plugin_name;

    /* only video decoders */
    if (!av_codec_is_decoder (in_plugin)
        || in_plugin->type != AVMEDIA_TYPE_VIDEO)
      continue;

    /* no quasi codecs, please */
    if (in_plugin->id == AV_CODEC_ID_RAWVIDEO ||
        in_plugin->id == AV_CODEC_ID_V210 ||
        in_plugin->id == AV_CODEC_ID_V210X ||
        in_plugin->id == AV_CODEC_ID_V308 ||
        in_plugin->id == AV_CODEC_ID_V408 ||
        in_plugin->id == AV_CODEC_ID_V410 ||
        in_plugin->id == AV_CODEC_ID_R210
        || in_plugin->id == AV_CODEC_ID_AYUV
        || in_plugin->id == AV_CODEC_ID_Y41P
        || in_plugin->id == AV_CODEC_ID_012V
        || in_plugin->id == AV_CODEC_ID_YUV4
#if AV_VERSION_INT (LIBAVCODEC_VERSION_MAJOR, LIBAVCODEC_VERSION_MINOR, LIBAVCODEC_VERSION_MICRO) >= \
        AV_VERSION_INT (57,4,0)
        || in_plugin->id == AV_CODEC_ID_WRAPPED_AVFRAME
#endif
        || in_plugin->id == AV_CODEC_ID_ZLIB) {
      continue;
    }

    /* No decoders depending on external libraries (we don't build them, but
     * people who build against an external ffmpeg might have them.
     * We have native gstreamer plugins for all of those libraries anyway. */
    if (!strncmp (in_plugin->name, "lib", 3)) {
      GST_DEBUG
          ("Not using external library decoder %s. Use the gstreamer-native ones instead.",
          in_plugin->name);
      continue;
    }

    /* No vdpau plugins until we can figure out how to properly use them
     * outside of ffmpeg. */
    if (g_str_has_suffix (in_plugin->name, "_vdpau")) {
      GST_DEBUG
          ("Ignoring VDPAU decoder %s. We can't handle this outside of ffmpeg",
          in_plugin->name);
      continue;
    }

    if (g_str_has_suffix (in_plugin->name, "_xvmc")) {
      GST_DEBUG
          ("Ignoring XVMC decoder %s. We can't handle this outside of ffmpeg",
          in_plugin->name);
      continue;
    }

    if (strstr (in_plugin->name, "vaapi")) {
      GST_DEBUG
          ("Ignoring VAAPI decoder %s. We can't handle this outside of ffmpeg",
          in_plugin->name);
      continue;
    }

    if (g_str_has_suffix (in_plugin->name, "_qsv")) {
      GST_DEBUG
          ("Ignoring qsv decoder %s. We can't handle this outside of ffmpeg",
          in_plugin->name);
      continue;
    }

    if (g_str_has_suffix (in_plugin->name, "_cuvid")) {
      GST_DEBUG
          ("Ignoring CUVID decoder %s. We can't handle this outside of ffmpeg",
          in_plugin->name);
      continue;
    }

    if (g_str_has_suffix (in_plugin->name, "_v4l2m2m")) {
      GST_DEBUG
          ("Ignoring V4L2 mem-to-mem decoder %s. We can't handle this outside of ffmpeg",
          in_plugin->name);
      continue;
    }

    GST_DEBUG ("Trying plugin %s [%s]", in_plugin->name, in_plugin->long_name);

    /* no codecs for which we're GUARANTEED to have better alternatives */
    /* MPEG1VIDEO : the mpeg2video decoder is preferred */
    /* MP1 : Use MP3 for decoding */
    /* MP2 : Use MP3 for decoding */
    /* Theora: Use libtheora based theoradec */
    if (!strcmp (in_plugin->name, "theora") ||
        !strcmp (in_plugin->name, "mpeg1video") ||
        strstr (in_plugin->name, "crystalhd") != NULL ||
        !strcmp (in_plugin->name, "ass") ||
        !strcmp (in_plugin->name, "srt") ||
        !strcmp (in_plugin->name, "pgssub") ||
        !strcmp (in_plugin->name, "dvdsub") ||
        !strcmp (in_plugin->name, "dvbsub")) {
      GST_LOG ("Ignoring decoder %s", in_plugin->name);
      continue;
    }

    /* construct the type */
    if (!strcmp (in_plugin->name, "hevc")) {
      /* the element is named avdec_h265 for historical reasons */
      plugin_name = g_strdup ("h265");
    } else {
      plugin_name = g_strdup ((gchar *) in_plugin->name);
    }
    /* element names may not contain '_' */
    g_strdelimit (plugin_name, NULL, '_');
    type_name = g_strdup_printf ("avdec_%s", plugin_name);
    g_free (plugin_name);

    type = g_type_from_name (type_name);

    if (!type) {
      /* create the gtype now */
      type =
          g_type_register_static (GST_TYPE_VIDEO_DECODER, type_name, &typeinfo,
          0);
      /* stash the AVCodec on the type so class_init can find it */
      g_type_set_qdata (type, GST_FFDEC_PARAMS_QDATA, (gpointer) in_plugin);
    }

    /* (Ronald) MPEG-4 gets a higher priority because it has been well-
     * tested and by far outperforms divxdec/xviddec - so we prefer it.
     * msmpeg4v3 same, as it outperforms divxdec for divx3 playback.
     * VC1/WMV3 are not working and thus unpreferred for now. */
    switch (in_plugin->id) {
      case AV_CODEC_ID_MPEG1VIDEO:
      case AV_CODEC_ID_MPEG2VIDEO:
      case AV_CODEC_ID_MPEG4:
      case AV_CODEC_ID_MSMPEG4V3:
      case AV_CODEC_ID_H264:
      case AV_CODEC_ID_HEVC:
      case AV_CODEC_ID_RV10:
      case AV_CODEC_ID_RV20:
      case AV_CODEC_ID_RV30:
      case AV_CODEC_ID_RV40:
        rank = GST_RANK_PRIMARY;
        break;
        /* DVVIDEO: we have a good dv decoder, fast on both ppc as well as x86.
         * They say libdv's quality is better though. leave as secondary.
         * note: if you change this, see the code in gstdv.c in good/ext/dv.
         */
      case AV_CODEC_ID_DVVIDEO:
        rank = GST_RANK_SECONDARY;
        break;
      default:
        rank = GST_RANK_MARGINAL;
        break;
    }
    if (!gst_element_register (plugin, type_name, rank, type)) {
      g_warning ("Failed to register %s", type_name);
      g_free (type_name);
      return FALSE;
    }

    g_free (type_name);
  }

  GST_LOG ("Finished Registering decoders");

  return TRUE;
}
2378