• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* GStreamer
2  * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
3  *
4  * This library is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Library General Public
6  * License as published by the Free Software Foundation; either
7  * version 2 of the License, or (at your option) any later version.
8  *
9  * This library is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12  * Library General Public License for more details.
13  *
14  * You should have received a copy of the GNU Library General Public
15  * License along with this library; if not, write to the
16  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17  * Boston, MA 02110-1301, USA.
18  *
19  * NOTE: some of implementations are copied/modified from Chromium code
20  *
21  * Copyright 2015 The Chromium Authors. All rights reserved.
22  *
23  * Redistribution and use in source and binary forms, with or without
24  * modification, are permitted provided that the following conditions are
25  * met:
26  *
27  *    * Redistributions of source code must retain the above copyright
28  * notice, this list of conditions and the following disclaimer.
29  *    * Redistributions in binary form must reproduce the above
30  * copyright notice, this list of conditions and the following disclaimer
31  * in the documentation and/or other materials provided with the
32  * distribution.
33  *    * Neither the name of Google Inc. nor the names of its
34  * contributors may be used to endorse or promote products derived from
35  * this software without specific prior written permission.
36  *
37  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
38  * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
39  * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
40  * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
41  * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
42  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
43  * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
44  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
45  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
46  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
47  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48  */
49 /**
50  * SECTION:gsth264decoder
51  * @title: GstH264Decoder
52  * @short_description: Base class to implement stateless H.264 decoders
53  * @sources:
54  * - gsth264picture.h
55  */
56 
57 #ifdef HAVE_CONFIG_H
58 #include <config.h>
59 #endif
60 
61 #include <gst/base/base.h>
62 #include "gsth264decoder.h"
63 
/* Debug category for this file; initialized by GST_DEBUG_CATEGORY_INIT in
 * the type definition below */
GST_DEBUG_CATEGORY (gst_h264_decoder_debug);
#define GST_CAT_DEFAULT gst_h264_decoder_debug
66 
/* How NAL units are framed in the input stream: AVC (length-prefixed NALs,
 * with avcC codec_data) or Annex-B byte-stream (start-code separated) */
typedef enum
{
  GST_H264_DECODER_FORMAT_NONE,
  GST_H264_DECODER_FORMAT_AVC,
  GST_H264_DECODER_FORMAT_BYTE
} GstH264DecoderFormat;
73 
/* Alignment of the input buffers: one NAL per buffer, or a complete
 * access unit per buffer */
typedef enum
{
  GST_H264_DECODER_ALIGN_NONE,
  GST_H264_DECODER_ALIGN_NAL,
  GST_H264_DECODER_ALIGN_AU
} GstH264DecoderAlign;
80 
struct _GstH264DecoderPrivate
{
  /* Conformance behavior, exposed through the "compliance" property */
  GstH264DecoderCompliance compliance;

  /* profile_idc of the stream currently being decoded */
  guint8 profile_idc;
  /* current coded picture dimensions */
  gint width, height;

  /* input codec_data, if any */
  GstBuffer *codec_data;
  /* byte size of the NAL length prefix for AVC-format input (from avcC) */
  guint nal_length_size;

  /* state */
  GstH264DecoderFormat in_format;
  GstH264DecoderAlign align;
  GstH264NalParser *parser;
  GstH264Dpb *dpb;
  /* Cache last field which can not enter the DPB, should be a non ref */
  GstH264Picture *last_field;

  /* used for low-latency vs. high throughput mode decision */
  gboolean is_live;

  /* sps/pps of the current slice */
  const GstH264SPS *active_sps;
  const GstH264PPS *active_pps;

  /* Picture currently being processed/decoded */
  GstH264Picture *current_picture;
  GstVideoCodecFrame *current_frame;

  /* Slice (slice header + nalu) currently being processed/decoded */
  GstH264Slice current_slice;

  /* Derived spec variables (MaxFrameNum, MaxPicNum, MaxLongTermFrameIdx) */
  gint max_frame_num;
  gint max_pic_num;
  gint max_long_term_frame_idx;

  /* Values related to the previously decoded picture */
  gint prev_frame_num;
  gint prev_ref_frame_num;
  gint prev_frame_num_offset;
  gboolean prev_has_memmgmnt5;

  /* Values related to previously decoded reference picture */
  gboolean prev_ref_has_memmgmnt5;
  gint prev_ref_top_field_order_cnt;
  gint prev_ref_pic_order_cnt_msb;
  gint prev_ref_pic_order_cnt_lsb;

  GstH264PictureField prev_ref_field;

  /* PicOrderCount of the previously outputted frame */
  gint last_output_poc;

  /* whether reference picture lists should be constructed at all */
  gboolean process_ref_pic_lists;
  /* number of frames output may be delayed by */
  guint preferred_output_delay;

  /* Reference picture lists, constructed for each frame */
  GArray *ref_pic_list_p0;
  GArray *ref_pic_list_b0;
  GArray *ref_pic_list_b1;

  /* Temporary picture list, for reference picture lists in fields,
   * corresponding to 8.2.4.2.2 refFrameList0ShortTerm, refFrameList0LongTerm
   * and 8.2.4.2.5 refFrameList1ShortTerm and refFrameListLongTerm */
  GArray *ref_frame_list_0_short_term;
  GArray *ref_frame_list_1_short_term;
  GArray *ref_frame_list_long_term;

  /* Reference picture lists, constructed for each slice */
  GArray *ref_pic_list0;
  GArray *ref_pic_list1;

  /* For delayed output */
  GstQueueArray *output_queue;
};
156 
/* Entry of the delayed output queue: pairs a codec frame with its decoded
 * picture until it is pushed downstream */
typedef struct
{
  /* Holds ref */
  GstVideoCodecFrame *frame;
  GstH264Picture *picture;
  /* Without ref: the decoder owns the output queue, so it outlives entries */
  GstH264Decoder *self;
} GstH264DecoderOutputFrame;
165 
/* Records @new_ret into *@ret only when no earlier non-OK flow return has
 * been recorded yet: the first error/flushing state wins.
 * NOTE: @new_ret is parenthesized in the expansion so that argument
 * expressions with operators lower than assignment expand correctly. */
#define UPDATE_FLOW_RETURN(ret,new_ret) G_STMT_START { \
  if (*(ret) == GST_FLOW_OK) \
    *(ret) = (new_ret); \
} G_STMT_END
170 
#define parent_class gst_h264_decoder_parent_class
/* Abstract base type (see SECTION comment above): registers the private
 * struct and initializes the debug category */
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstH264Decoder, gst_h264_decoder,
    GST_TYPE_VIDEO_DECODER,
    G_ADD_PRIVATE (GstH264Decoder);
    GST_DEBUG_CATEGORY_INIT (gst_h264_decoder_debug, "h264decoder", 0,
        "H.264 Video Decoder"));
177 
static void gst_h264_decoder_finalize (GObject * object);

/* GstVideoDecoder vfunc implementations */
static gboolean gst_h264_decoder_start (GstVideoDecoder * decoder);
static gboolean gst_h264_decoder_stop (GstVideoDecoder * decoder);
static gboolean gst_h264_decoder_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state);
static GstFlowReturn gst_h264_decoder_finish (GstVideoDecoder * decoder);
static gboolean gst_h264_decoder_flush (GstVideoDecoder * decoder);
static GstFlowReturn gst_h264_decoder_drain (GstVideoDecoder * decoder);
static GstFlowReturn gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame);

/* codec specific functions */
static GstFlowReturn gst_h264_decoder_process_sps (GstH264Decoder * self,
    GstH264SPS * sps);
static GstFlowReturn gst_h264_decoder_decode_slice (GstH264Decoder * self);
static GstFlowReturn gst_h264_decoder_decode_nal (GstH264Decoder * self,
    GstH264NalUnit * nalu);
static gboolean gst_h264_decoder_fill_picture_from_slice (GstH264Decoder * self,
    const GstH264Slice * slice, GstH264Picture * picture);
static gboolean gst_h264_decoder_calculate_poc (GstH264Decoder * self,
    GstH264Picture * picture);
static gboolean gst_h264_decoder_init_gap_picture (GstH264Decoder * self,
    GstH264Picture * picture, gint frame_num);
static GstFlowReturn gst_h264_decoder_drain_internal (GstH264Decoder * self);
static void gst_h264_decoder_finish_current_picture (GstH264Decoder * self,
    GstFlowReturn * ret);
static void gst_h264_decoder_finish_picture (GstH264Decoder * self,
    GstH264Picture * picture, GstFlowReturn * ret);
static void gst_h264_decoder_prepare_ref_pic_lists (GstH264Decoder * self,
    GstH264Picture * current_picture);
static void gst_h264_decoder_clear_ref_pic_lists (GstH264Decoder * self);
static gboolean gst_h264_decoder_modify_ref_pic_lists (GstH264Decoder * self);
static gboolean
gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self,
    GstH264Picture * picture);
static void gst_h264_decoder_do_output_picture (GstH264Decoder * self,
    GstH264Picture * picture, GstFlowReturn * ret);
static GstH264Picture *gst_h264_decoder_new_field_picture (GstH264Decoder *
    self, GstH264Picture * picture);
static void
gst_h264_decoder_clear_output_frame (GstH264DecoderOutputFrame * output_frame);
220 
/* GObject property IDs; see gst_h264_decoder_class_init() */
enum
{
  PROP_0,
  PROP_COMPLIANCE,
};
226 
227 /**
228  * gst_h264_decoder_compliance_get_type:
229  *
230  * Get the compliance type of the h264 decoder.
231  *
232  * Since: 1.20
233  */
234 GType
gst_h264_decoder_compliance_get_type(void)235 gst_h264_decoder_compliance_get_type (void)
236 {
237   static gsize h264_decoder_compliance_type = 0;
238   static const GEnumValue compliances[] = {
239     {GST_H264_DECODER_COMPLIANCE_AUTO, "GST_H264_DECODER_COMPLIANCE_AUTO",
240         "auto"},
241     {GST_H264_DECODER_COMPLIANCE_STRICT, "GST_H264_DECODER_COMPLIANCE_STRICT",
242         "strict"},
243     {GST_H264_DECODER_COMPLIANCE_NORMAL, "GST_H264_DECODER_COMPLIANCE_NORMAL",
244         "normal"},
245     {GST_H264_DECODER_COMPLIANCE_FLEXIBLE,
246         "GST_H264_DECODER_COMPLIANCE_FLEXIBLE", "flexible"},
247     {0, NULL, NULL},
248   };
249 
250 
251   if (g_once_init_enter (&h264_decoder_compliance_type)) {
252     GType _type;
253 
254     _type = g_enum_register_static ("GstH264DecoderCompliance", compliances);
255     g_once_init_leave (&h264_decoder_compliance_type, _type);
256   }
257 
258   return (GType) h264_decoder_compliance_type;
259 }
260 
261 static void
gst_h264_decoder_get_property(GObject * object,guint property_id,GValue * value,GParamSpec * pspec)262 gst_h264_decoder_get_property (GObject * object, guint property_id,
263     GValue * value, GParamSpec * pspec)
264 {
265   GstH264Decoder *self = GST_H264_DECODER (object);
266   GstH264DecoderPrivate *priv = self->priv;
267 
268   switch (property_id) {
269     case PROP_COMPLIANCE:
270       GST_OBJECT_LOCK (self);
271       g_value_set_enum (value, priv->compliance);
272       GST_OBJECT_UNLOCK (self);
273       break;
274     default:
275       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
276       break;
277   }
278 }
279 
280 static void
gst_h264_decoder_set_property(GObject * object,guint property_id,const GValue * value,GParamSpec * pspec)281 gst_h264_decoder_set_property (GObject * object, guint property_id,
282     const GValue * value, GParamSpec * pspec)
283 {
284   GstH264Decoder *self = GST_H264_DECODER (object);
285   GstH264DecoderPrivate *priv = self->priv;
286 
287   switch (property_id) {
288     case PROP_COMPLIANCE:
289       GST_OBJECT_LOCK (self);
290       priv->compliance = g_value_get_enum (value);
291       GST_OBJECT_UNLOCK (self);
292       break;
293     default:
294       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
295       break;
296   }
297 }
298 
/* Class initializer: wires up GObject property handling, the
 * GstVideoDecoder vfuncs, and installs the "compliance" property */
static void
gst_h264_decoder_class_init (GstH264DecoderClass * klass)
{
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->finalize = GST_DEBUG_FUNCPTR (gst_h264_decoder_finalize);
  object_class->get_property = gst_h264_decoder_get_property;
  object_class->set_property = gst_h264_decoder_set_property;

  decoder_class->start = GST_DEBUG_FUNCPTR (gst_h264_decoder_start);
  decoder_class->stop = GST_DEBUG_FUNCPTR (gst_h264_decoder_stop);
  decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_h264_decoder_set_format);
  decoder_class->finish = GST_DEBUG_FUNCPTR (gst_h264_decoder_finish);
  decoder_class->flush = GST_DEBUG_FUNCPTR (gst_h264_decoder_flush);
  decoder_class->drain = GST_DEBUG_FUNCPTR (gst_h264_decoder_drain);
  decoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_h264_decoder_handle_frame);

  /**
   * GstH264Decoder:compliance:
   *
   * The compliance controls the behavior of the decoder to handle some
   * subtle cases and contexts, such as the low-latency DPB bumping or
   * mapping the baseline profile as the constrained-baseline profile,
   * etc.
   *
   * Since: 1.20
   */
  /* G_PARAM_CONSTRUCT: the default is applied at construction time */
  g_object_class_install_property (object_class, PROP_COMPLIANCE,
      g_param_spec_enum ("compliance", "Decoder Compliance",
          "The decoder's behavior in compliance with the h264 spec.",
          GST_TYPE_H264_DECODER_COMPLIANCE, GST_H264_DECODER_COMPLIANCE_AUTO,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT));
}
334 
335 static void
gst_h264_decoder_init(GstH264Decoder * self)336 gst_h264_decoder_init (GstH264Decoder * self)
337 {
338   GstH264DecoderPrivate *priv;
339 
340   gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
341 
342   self->priv = priv = gst_h264_decoder_get_instance_private (self);
343 
344   priv->last_output_poc = G_MININT32;
345 
346   priv->ref_pic_list_p0 = g_array_sized_new (FALSE, TRUE,
347       sizeof (GstH264Picture *), 32);
348   g_array_set_clear_func (priv->ref_pic_list_p0,
349       (GDestroyNotify) gst_h264_picture_clear);
350 
351   priv->ref_pic_list_b0 = g_array_sized_new (FALSE, TRUE,
352       sizeof (GstH264Picture *), 32);
353   g_array_set_clear_func (priv->ref_pic_list_b0,
354       (GDestroyNotify) gst_h264_picture_clear);
355 
356   priv->ref_pic_list_b1 = g_array_sized_new (FALSE, TRUE,
357       sizeof (GstH264Picture *), 32);
358   g_array_set_clear_func (priv->ref_pic_list_b1,
359       (GDestroyNotify) gst_h264_picture_clear);
360 
361   priv->ref_frame_list_0_short_term = g_array_sized_new (FALSE, TRUE,
362       sizeof (GstH264Picture *), 32);
363   g_array_set_clear_func (priv->ref_frame_list_0_short_term,
364       (GDestroyNotify) gst_h264_picture_clear);
365 
366   priv->ref_frame_list_1_short_term = g_array_sized_new (FALSE, TRUE,
367       sizeof (GstH264Picture *), 32);
368   g_array_set_clear_func (priv->ref_frame_list_1_short_term,
369       (GDestroyNotify) gst_h264_picture_clear);
370 
371   priv->ref_frame_list_long_term = g_array_sized_new (FALSE, TRUE,
372       sizeof (GstH264Picture *), 32);
373   g_array_set_clear_func (priv->ref_frame_list_long_term,
374       (GDestroyNotify) gst_h264_picture_clear);
375 
376   priv->ref_pic_list0 = g_array_sized_new (FALSE, TRUE,
377       sizeof (GstH264Picture *), 32);
378   priv->ref_pic_list1 = g_array_sized_new (FALSE, TRUE,
379       sizeof (GstH264Picture *), 32);
380 
381   priv->output_queue =
382       gst_queue_array_new_for_struct (sizeof (GstH264DecoderOutputFrame), 1);
383   gst_queue_array_set_clear_func (priv->output_queue,
384       (GDestroyNotify) gst_h264_decoder_clear_output_frame);
385 }
386 
387 static void
gst_h264_decoder_finalize(GObject * object)388 gst_h264_decoder_finalize (GObject * object)
389 {
390   GstH264Decoder *self = GST_H264_DECODER (object);
391   GstH264DecoderPrivate *priv = self->priv;
392 
393   g_array_unref (priv->ref_pic_list_p0);
394   g_array_unref (priv->ref_pic_list_b0);
395   g_array_unref (priv->ref_pic_list_b1);
396   g_array_unref (priv->ref_frame_list_0_short_term);
397   g_array_unref (priv->ref_frame_list_1_short_term);
398   g_array_unref (priv->ref_frame_list_long_term);
399   g_array_unref (priv->ref_pic_list0);
400   g_array_unref (priv->ref_pic_list1);
401   gst_queue_array_free (priv->output_queue);
402 
403   G_OBJECT_CLASS (parent_class)->finalize (object);
404 }
405 
/* Drops all per-stream state (codec_data, input state, parser, DPB and the
 * cached unpaired field) and restores defaults.  Called from both start()
 * and stop(); start() recreates the parser and DPB afterwards. */
static void
gst_h264_decoder_reset (GstH264Decoder * self)
{
  GstH264DecoderPrivate *priv = self->priv;

  gst_clear_buffer (&priv->codec_data);
  g_clear_pointer (&self->input_state, gst_video_codec_state_unref);
  g_clear_pointer (&priv->parser, gst_h264_nal_parser_free);
  g_clear_pointer (&priv->dpb, gst_h264_dpb_free);
  gst_h264_picture_clear (&priv->last_field);

  priv->profile_idc = 0;
  priv->width = 0;
  priv->height = 0;
  /* default NAL length prefix size for AVC input, until avcC says otherwise */
  priv->nal_length_size = 4;
}
422 
423 static gboolean
gst_h264_decoder_start(GstVideoDecoder * decoder)424 gst_h264_decoder_start (GstVideoDecoder * decoder)
425 {
426   GstH264Decoder *self = GST_H264_DECODER (decoder);
427   GstH264DecoderPrivate *priv = self->priv;
428 
429   gst_h264_decoder_reset (self);
430 
431   priv->parser = gst_h264_nal_parser_new ();
432   priv->dpb = gst_h264_dpb_new ();
433 
434   return TRUE;
435 }
436 
437 static gboolean
gst_h264_decoder_stop(GstVideoDecoder * decoder)438 gst_h264_decoder_stop (GstVideoDecoder * decoder)
439 {
440   GstH264Decoder *self = GST_H264_DECODER (decoder);
441 
442   gst_h264_decoder_reset (self);
443 
444   return TRUE;
445 }
446 
447 static void
gst_h264_decoder_clear_output_frame(GstH264DecoderOutputFrame * output_frame)448 gst_h264_decoder_clear_output_frame (GstH264DecoderOutputFrame * output_frame)
449 {
450   if (!output_frame)
451     return;
452 
453   if (output_frame->frame) {
454     gst_video_decoder_release_frame (GST_VIDEO_DECODER (output_frame->self),
455         output_frame->frame);
456     output_frame->frame = NULL;
457   }
458 
459   gst_h264_picture_clear (&output_frame->picture);
460 }
461 
/* Empties the DPB and all pending-output state.  When @flush is FALSE the
 * codec frames still tracked by the GstVideoDecoder base class must be
 * released here by hand; on a real flush the base class does that itself. */
static void
gst_h264_decoder_clear_dpb (GstH264Decoder * self, gboolean flush)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
  GstH264DecoderPrivate *priv = self->priv;
  GstH264Picture *picture;

  /* If we are not flushing now, videodecoder baseclass will hold
   * GstVideoCodecFrame. Release frames manually */
  if (!flush) {
    while ((picture = gst_h264_dpb_bump (priv->dpb, TRUE)) != NULL) {
      GstVideoCodecFrame *frame = gst_video_decoder_get_frame (decoder,
          picture->system_frame_number);

      if (frame)
        gst_video_decoder_release_frame (decoder, frame);
      gst_h264_picture_unref (picture);
    }
  }

  gst_queue_array_clear (priv->output_queue);
  gst_h264_decoder_clear_ref_pic_lists (self);
  gst_h264_picture_clear (&priv->last_field);
  gst_h264_dpb_clear (priv->dpb);
  /* back to "nothing output yet" */
  priv->last_output_poc = G_MININT32;
}
488 
489 static gboolean
gst_h264_decoder_flush(GstVideoDecoder * decoder)490 gst_h264_decoder_flush (GstVideoDecoder * decoder)
491 {
492   GstH264Decoder *self = GST_H264_DECODER (decoder);
493 
494   gst_h264_decoder_clear_dpb (self, TRUE);
495 
496   return TRUE;
497 }
498 
499 static GstFlowReturn
gst_h264_decoder_drain(GstVideoDecoder * decoder)500 gst_h264_decoder_drain (GstVideoDecoder * decoder)
501 {
502   GstH264Decoder *self = GST_H264_DECODER (decoder);
503 
504   /* dpb will be cleared by this method */
505   return gst_h264_decoder_drain_internal (self);
506 }
507 
508 static GstFlowReturn
gst_h264_decoder_finish(GstVideoDecoder * decoder)509 gst_h264_decoder_finish (GstVideoDecoder * decoder)
510 {
511   return gst_h264_decoder_drain (decoder);
512 }
513 
/* GstVideoDecoder::handle_frame: splits the input buffer into NAL units
 * (AVC length-prefixed or Annex-B byte-stream, depending on in_format),
 * decodes each in order, then finishes the resulting picture.  The first
 * decode error aborts the NAL loop. */
static GstFlowReturn
gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstH264Decoder *self = GST_H264_DECODER (decoder);
  GstH264DecoderPrivate *priv = self->priv;
  GstBuffer *in_buf = frame->input_buffer;
  GstH264NalUnit nalu;
  GstH264ParserResult pres;
  GstMapInfo map;
  GstFlowReturn decode_ret = GST_FLOW_OK;

  GST_LOG_OBJECT (self,
      "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
      GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));

  priv->current_frame = frame;

  gst_buffer_map (in_buf, &map, GST_MAP_READ);
  if (priv->in_format == GST_H264_DECODER_FORMAT_AVC) {
    /* AVC: each NAL is preceded by a nal_length_size-byte length field */
    pres = gst_h264_parser_identify_nalu_avc (priv->parser,
        map.data, 0, map.size, priv->nal_length_size, &nalu);

    while (pres == GST_H264_PARSER_OK && decode_ret == GST_FLOW_OK) {
      decode_ret = gst_h264_decoder_decode_nal (self, &nalu);

      pres = gst_h264_parser_identify_nalu_avc (priv->parser,
          map.data, nalu.offset + nalu.size, map.size, priv->nal_length_size,
          &nalu);
    }
  } else {
    /* Byte-stream: NALs are delimited by start codes.  The last NAL in a
     * buffer has no following start code, so NO_NAL_END is not an error
     * here — treat it as a complete NAL. */
    pres = gst_h264_parser_identify_nalu (priv->parser,
        map.data, 0, map.size, &nalu);

    if (pres == GST_H264_PARSER_NO_NAL_END)
      pres = GST_H264_PARSER_OK;

    while (pres == GST_H264_PARSER_OK && decode_ret == GST_FLOW_OK) {
      decode_ret = gst_h264_decoder_decode_nal (self, &nalu);

      pres = gst_h264_parser_identify_nalu (priv->parser,
          map.data, nalu.offset + nalu.size, map.size, &nalu);

      if (pres == GST_H264_PARSER_NO_NAL_END)
        pres = GST_H264_PARSER_OK;
    }
  }

  gst_buffer_unmap (in_buf, &map);

  if (decode_ret != GST_FLOW_OK) {
    if (decode_ret == GST_FLOW_ERROR) {
      GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
          ("Failed to decode data"), (NULL), decode_ret);
    }

    /* Drop the frame and reset the in-progress picture state */
    gst_video_decoder_drop_frame (decoder, frame);
    gst_h264_picture_clear (&priv->current_picture);
    priv->current_frame = NULL;

    return decode_ret;
  }

  gst_h264_decoder_finish_current_picture (self, &decode_ret);
  gst_video_codec_frame_unref (frame);
  priv->current_frame = NULL;

  if (decode_ret == GST_FLOW_ERROR) {
    GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
        ("Failed to decode data"), (NULL), decode_ret);
  }

  return decode_ret;
}
589 
/* Parses an SPS NAL, lets the subclass-facing process_sps() handle it
 * first, and only then commits it to the parser's SPS table so a rejected
 * SPS never becomes active.  @sps is stack-allocated and always cleared. */
static GstFlowReturn
gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu)
{
  GstH264DecoderPrivate *priv = self->priv;
  GstH264SPS sps;
  GstH264ParserResult pres;
  GstFlowReturn ret;

  pres = gst_h264_parse_sps (nalu, &sps);
  if (pres != GST_H264_PARSER_OK) {
    GST_WARNING_OBJECT (self, "Failed to parse SPS, result %d", pres);
    return GST_FLOW_ERROR;
  }

  GST_LOG_OBJECT (self, "SPS parsed");

  ret = gst_h264_decoder_process_sps (self, &sps);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (self, "Failed to process SPS");
  } else if (gst_h264_parser_update_sps (priv->parser,
          &sps) != GST_H264_PARSER_OK) {
    GST_WARNING_OBJECT (self, "Failed to update SPS");
    ret = GST_FLOW_ERROR;
  }

  gst_h264_sps_clear (&sps);

  return ret;
}
619 
/* Parses a PPS NAL and commits it to the parser's PPS table.  Streams
 * using FMO (more than one slice group) are rejected.  @pps is
 * stack-allocated and always cleared. */
static GstFlowReturn
gst_h264_decoder_parse_pps (GstH264Decoder * self, GstH264NalUnit * nalu)
{
  GstH264DecoderPrivate *priv = self->priv;
  GstH264PPS pps;
  GstH264ParserResult pres;
  GstFlowReturn ret = GST_FLOW_OK;

  pres = gst_h264_parse_pps (priv->parser, nalu, &pps);
  if (pres != GST_H264_PARSER_OK) {
    GST_WARNING_OBJECT (self, "Failed to parse PPS, result %d", pres);
    return GST_FLOW_ERROR;
  }

  GST_LOG_OBJECT (self, "PPS parsed");

  if (pps.num_slice_groups_minus1 > 0) {
    GST_FIXME_OBJECT (self, "FMO is not supported");
    ret = GST_FLOW_ERROR;
  } else if (gst_h264_parser_update_pps (priv->parser, &pps)
      != GST_H264_PARSER_OK) {
    GST_WARNING_OBJECT (self, "Failed to update PPS");
    ret = GST_FLOW_ERROR;
  }

  gst_h264_pps_clear (&pps);

  return ret;
}
649 
/* Parses an ISO/IEC 14496-15 avcC codec_data blob: header (version,
 * profile/level, NAL length size), then the embedded SPS and PPS sets.
 * Updates priv->nal_length_size and feeds each SPS/PPS through the
 * regular parse paths.  Returns GST_FLOW_ERROR on any malformed data. */
static GstFlowReturn
gst_h264_decoder_parse_codec_data (GstH264Decoder * self, const guint8 * data,
    gsize size)
{
  GstH264DecoderPrivate *priv = self->priv;
  guint num_sps, num_pps;
  guint off;
  gint i;
  GstH264ParserResult pres;
  GstH264NalUnit nalu;
  GstFlowReturn ret = GST_FLOW_OK;
#ifndef GST_DISABLE_GST_DEBUG
  guint profile;
#endif

  /* parse the avcC data */
  if (size < 7) {               /* when numSPS==0 and numPPS==0, length is 7 bytes */
    return GST_FLOW_ERROR;
  }

  /* parse the version, this must be 1 */
  if (data[0] != 1) {
    return GST_FLOW_ERROR;
  }
#ifndef GST_DISABLE_GST_DEBUG
  /* AVCProfileIndication */
  /* profile_compat */
  /* AVCLevelIndication */
  profile = (data[1] << 16) | (data[2] << 8) | data[3];
  GST_DEBUG_OBJECT (self, "profile %06x", profile);
#endif

  /* 6 bits reserved | 2 bits lengthSizeMinusOne */
  /* this is the number of bytes in front of the NAL units to mark their
   * length */
  priv->nal_length_size = (data[4] & 0x03) + 1;
  GST_DEBUG_OBJECT (self, "nal length size %u", priv->nal_length_size);

  /* 3 bits reserved | 5 bits numOfSequenceParameterSets */
  num_sps = data[5] & 0x1f;
  off = 6;
  for (i = 0; i < num_sps; i++) {
    /* inside avcC the SPS/PPS entries always use a 2-byte length prefix,
     * independent of nal_length_size */
    pres = gst_h264_parser_identify_nalu_avc (priv->parser,
        data, off, size, 2, &nalu);
    if (pres != GST_H264_PARSER_OK) {
      GST_WARNING_OBJECT (self, "Failed to identify SPS nalu");
      return GST_FLOW_ERROR;
    }

    ret = gst_h264_decoder_parse_sps (self, &nalu);
    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (self, "Failed to parse SPS");
      return ret;
    }
    off = nalu.offset + nalu.size;
  }

  /* make sure the numOfPictureParameterSets byte is inside the blob */
  if (off >= size) {
    GST_WARNING_OBJECT (self, "Too small avcC");
    return GST_FLOW_ERROR;
  }

  num_pps = data[off];
  off++;

  for (i = 0; i < num_pps; i++) {
    pres = gst_h264_parser_identify_nalu_avc (priv->parser,
        data, off, size, 2, &nalu);
    if (pres != GST_H264_PARSER_OK) {
      GST_WARNING_OBJECT (self, "Failed to identify PPS nalu");
      return GST_FLOW_ERROR;
    }

    ret = gst_h264_decoder_parse_pps (self, &nalu);
    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (self, "Failed to parse PPS");
      return ret;
    }
    off = nalu.offset + nalu.size;
  }

  return GST_FLOW_OK;
}
732 
733 static gboolean
gst_h264_decoder_preprocess_slice(GstH264Decoder * self,GstH264Slice * slice)734 gst_h264_decoder_preprocess_slice (GstH264Decoder * self, GstH264Slice * slice)
735 {
736   GstH264DecoderPrivate *priv = self->priv;
737 
738   if (!priv->current_picture) {
739     if (slice->header.first_mb_in_slice != 0) {
740       GST_ERROR_OBJECT (self, "Invalid stream, first_mb_in_slice %d",
741           slice->header.first_mb_in_slice);
742       return FALSE;
743     }
744   }
745 
746   return TRUE;
747 }
748 
/* Derives the picture numbers of every reference picture in the DPB
 * relative to the current picture (spec 8.2.4.1): long_term_pic_num for
 * long-term refs, frame_num_wrap/pic_num for short-term refs.  Field
 * pictures get the 2x(+1) field numbering; same-parity fields take the
 * "+1" variant. */
static void
gst_h264_decoder_update_pic_nums (GstH264Decoder * self,
    GstH264Picture * current_picture, gint frame_num)
{
  GstH264DecoderPrivate *priv = self->priv;
  GArray *dpb = gst_h264_dpb_get_pictures_all (priv->dpb);
  gint i;

  for (i = 0; i < dpb->len; i++) {
    GstH264Picture *picture = g_array_index (dpb, GstH264Picture *, i);

    /* non-reference pictures have no picture number */
    if (!GST_H264_PICTURE_IS_REF (picture))
      continue;

    if (GST_H264_PICTURE_IS_LONG_TERM_REF (picture)) {
      if (GST_H264_PICTURE_IS_FRAME (current_picture))
        picture->long_term_pic_num = picture->long_term_frame_idx;
      else if (current_picture->field == picture->field)
        picture->long_term_pic_num = 2 * picture->long_term_frame_idx + 1;
      else
        picture->long_term_pic_num = 2 * picture->long_term_frame_idx;
    } else {
      /* frame_num wraps modulo MaxFrameNum (8-27/8-28) */
      if (picture->frame_num > frame_num)
        picture->frame_num_wrap = picture->frame_num - priv->max_frame_num;
      else
        picture->frame_num_wrap = picture->frame_num;

      if (GST_H264_PICTURE_IS_FRAME (current_picture))
        picture->pic_num = picture->frame_num_wrap;
      else if (picture->field == current_picture->field)
        picture->pic_num = 2 * picture->frame_num_wrap + 1;
      else
        picture->pic_num = 2 * picture->frame_num_wrap;
    }
  }

  g_array_unref (dpb);
}
787 
/* Splits a decoded frame @picture into a complementary field pair:
 * @picture itself becomes the first field (ordered by its field POCs) and
 * a newly created second field is returned, or NULL on failure.  Both
 * fields share frame_num, POC values, ref state and the system frame
 * number. */
static GstH264Picture *
gst_h264_decoder_split_frame (GstH264Decoder * self, GstH264Picture * picture)
{
  GstH264Picture *other_field;

  g_assert (GST_H264_PICTURE_IS_FRAME (picture));

  other_field = gst_h264_decoder_new_field_picture (self, picture);
  if (!other_field) {
    GST_WARNING_OBJECT (self,
        "Couldn't split frame into complementary field pair");
    return NULL;
  }

  GST_LOG_OBJECT (self, "Split picture %p, poc %d, frame num %d",
      picture, picture->pic_order_cnt, picture->frame_num);

  /* FIXME: enhance TFF decision by using picture timing SEI */
  /* The field with the lower order count is presented first */
  if (picture->top_field_order_cnt < picture->bottom_field_order_cnt) {
    picture->field = GST_H264_PICTURE_FIELD_TOP_FIELD;
    picture->pic_order_cnt = picture->top_field_order_cnt;

    other_field->field = GST_H264_PICTURE_FIELD_BOTTOM_FIELD;
    other_field->pic_order_cnt = picture->bottom_field_order_cnt;
  } else {
    picture->field = GST_H264_PICTURE_FIELD_BOTTOM_FIELD;
    picture->pic_order_cnt = picture->bottom_field_order_cnt;

    other_field->field = GST_H264_PICTURE_FIELD_TOP_FIELD;
    other_field->pic_order_cnt = picture->top_field_order_cnt;
  }

  other_field->top_field_order_cnt = picture->top_field_order_cnt;
  other_field->bottom_field_order_cnt = picture->bottom_field_order_cnt;
  other_field->frame_num = picture->frame_num;
  other_field->ref = picture->ref;
  other_field->nonexisting = picture->nonexisting;
  other_field->system_frame_number = picture->system_frame_number;

  return other_field;
}
829 
/* Outputs @picture without going through the DPB.  A complete frame is
 * output immediately.  A first field is only cached in priv->last_field
 * until its pairing second field arrives; the pair is then output as one
 * unit.  Mismatched field pairs are an error.  Takes ownership of
 * @picture; *@ret keeps the first non-OK flow return. */
static void
output_picture_directly (GstH264Decoder * self, GstH264Picture * picture,
    GstFlowReturn * ret)
{
  GstH264DecoderPrivate *priv = self->priv;
  GstH264Picture *out_pic = NULL;
  GstFlowReturn flow_ret = GST_FLOW_OK;

  g_assert (ret != NULL);

  if (GST_H264_PICTURE_IS_FRAME (picture)) {
    g_assert (priv->last_field == NULL);
    out_pic = g_steal_pointer (&picture);
    goto output;
  }

  if (priv->last_field == NULL) {
    /* a second field with no cached first field is invalid */
    if (picture->second_field) {
      GST_WARNING ("Set the last output %p poc:%d, without first field",
          picture, picture->pic_order_cnt);

      flow_ret = GST_FLOW_ERROR;
      goto output;
    }

    /* Just cache the first field. */
    priv->last_field = g_steal_pointer (&picture);
  } else {
    /* the incoming field must be the second field paired with last_field */
    if (!picture->second_field || !picture->other_field
        || picture->other_field != priv->last_field) {
      GST_WARNING ("The last field %p poc:%d is not the pair of the "
          "current field %p poc:%d",
          priv->last_field, priv->last_field->pic_order_cnt,
          picture, picture->pic_order_cnt);

      gst_h264_picture_clear (&priv->last_field);
      flow_ret = GST_FLOW_ERROR;
      goto output;
    }

    GST_TRACE ("Pair the last field %p poc:%d and the current"
        " field %p poc:%d",
        priv->last_field, priv->last_field->pic_order_cnt,
        picture, picture->pic_order_cnt);

    /* output the first field; it carries the second one via other_field */
    out_pic = priv->last_field;
    priv->last_field = NULL;
    /* Link each field. */
    out_pic->other_field = picture;
  }

output:
  if (out_pic) {
    gst_h264_dpb_set_last_output (priv->dpb, out_pic);
    gst_h264_decoder_do_output_picture (self, out_pic, &flow_ret);
  }

  /* drop our remaining ref (no-op when the pointer was stolen above) */
  gst_h264_picture_clear (&picture);

  UPDATE_FLOW_RETURN (ret, flow_ret);
}
891 
892 static void
add_picture_to_dpb(GstH264Decoder * self,GstH264Picture * picture)893 add_picture_to_dpb (GstH264Decoder * self, GstH264Picture * picture)
894 {
895   GstH264DecoderPrivate *priv = self->priv;
896 
897   if (!gst_h264_dpb_get_interlaced (priv->dpb)) {
898     g_assert (priv->last_field == NULL);
899     gst_h264_dpb_add (priv->dpb, picture);
900     return;
901   }
902 
903   /* The first field of the last picture may not be able to enter the
904      DPB if it is a non ref, but if the second field enters the DPB, we
905      need to add both of them. */
906   if (priv->last_field && picture->other_field == priv->last_field) {
907     gst_h264_dpb_add (priv->dpb, priv->last_field);
908     priv->last_field = NULL;
909   }
910 
911   gst_h264_dpb_add (priv->dpb, picture);
912 }
913 
914 static void
_bump_dpb(GstH264Decoder * self,GstH264DpbBumpMode bump_level,GstH264Picture * current_picture,GstFlowReturn * ret)915 _bump_dpb (GstH264Decoder * self, GstH264DpbBumpMode bump_level,
916     GstH264Picture * current_picture, GstFlowReturn * ret)
917 {
918   GstH264DecoderPrivate *priv = self->priv;
919 
920   g_assert (ret != NULL);
921 
922   while (gst_h264_dpb_needs_bump (priv->dpb, current_picture, bump_level)) {
923     GstH264Picture *to_output;
924 
925     to_output = gst_h264_dpb_bump (priv->dpb, FALSE);
926 
927     if (!to_output) {
928       GST_WARNING_OBJECT (self, "Bumping is needed but no picture to output");
929       break;
930     }
931 
932     gst_h264_decoder_do_output_picture (self, to_output, ret);
933   }
934 }
935 
936 static GstFlowReturn
gst_h264_decoder_handle_frame_num_gap(GstH264Decoder * self,gint frame_num)937 gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
938 {
939   GstH264DecoderPrivate *priv = self->priv;
940   const GstH264SPS *sps = priv->active_sps;
941   gint unused_short_term_frame_num;
942 
943   if (!sps) {
944     GST_ERROR_OBJECT (self, "No active sps");
945     return GST_FLOW_ERROR;
946   }
947 
948   if (priv->prev_ref_frame_num == frame_num) {
949     GST_TRACE_OBJECT (self,
950         "frame_num == PrevRefFrameNum (%d), not a gap", frame_num);
951     return GST_FLOW_OK;
952   }
953 
954   if (((priv->prev_ref_frame_num + 1) % priv->max_frame_num) == frame_num) {
955     GST_TRACE_OBJECT (self,
956         "frame_num ==  (PrevRefFrameNum + 1) %% MaxFrameNum (%d), not a gap",
957         frame_num);
958     return GST_FLOW_OK;
959   }
960 
961   if (gst_h264_dpb_get_size (priv->dpb) == 0) {
962     GST_TRACE_OBJECT (self, "DPB is empty, not a gap");
963     return GST_FLOW_OK;
964   }
965 
966   if (!sps->gaps_in_frame_num_value_allowed_flag) {
967     /* This is likely the case where some frames were dropped.
968      * then we need to keep decoding without error out */
969     GST_WARNING_OBJECT (self, "Invalid frame num %d, maybe frame drop",
970         frame_num);
971 
972     return GST_FLOW_OK;
973   }
974 
975   GST_DEBUG_OBJECT (self, "Handling frame num gap %d -> %d (MaxFrameNum: %d)",
976       priv->prev_ref_frame_num, frame_num, priv->max_frame_num);
977 
978   /* 7.4.3/7-23 */
979   unused_short_term_frame_num =
980       (priv->prev_ref_frame_num + 1) % priv->max_frame_num;
981   while (unused_short_term_frame_num != frame_num) {
982     GstH264Picture *picture = gst_h264_picture_new ();
983     GstFlowReturn ret = GST_FLOW_OK;
984 
985     if (!gst_h264_decoder_init_gap_picture (self, picture,
986             unused_short_term_frame_num))
987       return GST_FLOW_ERROR;
988 
989     gst_h264_decoder_update_pic_nums (self, picture,
990         unused_short_term_frame_num);
991 
992     /* C.2.1 */
993     if (!gst_h264_decoder_sliding_window_picture_marking (self, picture)) {
994       GST_ERROR_OBJECT (self,
995           "Couldn't perform sliding window picture marking");
996       return GST_FLOW_ERROR;
997     }
998 
999     gst_h264_dpb_delete_unused (priv->dpb);
1000 
1001     _bump_dpb (self, GST_H264_DPB_BUMP_NORMAL_LATENCY, picture, &ret);
1002     if (ret != GST_FLOW_OK)
1003       return ret;
1004 
1005     /* the picture is short term ref, add to DPB. */
1006     if (gst_h264_dpb_get_interlaced (priv->dpb)) {
1007       GstH264Picture *other_field =
1008           gst_h264_decoder_split_frame (self, picture);
1009 
1010       add_picture_to_dpb (self, picture);
1011       add_picture_to_dpb (self, other_field);
1012     } else {
1013       add_picture_to_dpb (self, picture);
1014     }
1015 
1016     unused_short_term_frame_num++;
1017     unused_short_term_frame_num %= priv->max_frame_num;
1018   }
1019 
1020   return GST_FLOW_OK;
1021 }
1022 
1023 static gboolean
gst_h264_decoder_init_current_picture(GstH264Decoder * self)1024 gst_h264_decoder_init_current_picture (GstH264Decoder * self)
1025 {
1026   GstH264DecoderPrivate *priv = self->priv;
1027 
1028   if (!gst_h264_decoder_fill_picture_from_slice (self, &priv->current_slice,
1029           priv->current_picture)) {
1030     return FALSE;
1031   }
1032 
1033   if (!gst_h264_decoder_calculate_poc (self, priv->current_picture))
1034     return FALSE;
1035 
1036   /* If the slice header indicates we will have to perform reference marking
1037    * process after this picture is decoded, store required data for that
1038    * purpose */
1039   if (priv->current_slice.header.
1040       dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
1041     priv->current_picture->dec_ref_pic_marking =
1042         priv->current_slice.header.dec_ref_pic_marking;
1043   }
1044 
1045   return TRUE;
1046 }
1047 
/* Begin decoding of priv->current_picture: handle frame_num gaps,
 * initialize the picture from the current slice, flush or clear the DPB
 * on IDR, update picture numbers and reference picture lists, then
 * notify the subclass through the start_picture vfunc.
 *
 * Returns GST_FLOW_OK on success, otherwise the failing flow return.
 */
static GstFlowReturn
gst_h264_decoder_start_current_picture (GstH264Decoder * self)
{
  GstH264DecoderClass *klass;
  GstH264DecoderPrivate *priv = self->priv;
  const GstH264SPS *sps;
  gint frame_num;
  GstFlowReturn ret = GST_FLOW_OK;
  GstH264Picture *current_picture;

  g_assert (priv->current_picture != NULL);
  g_assert (priv->active_sps != NULL);
  g_assert (priv->active_pps != NULL);

  sps = priv->active_sps;

  priv->max_frame_num = sps->max_frame_num;
  frame_num = priv->current_slice.header.frame_num;
  /* For an IDR picture PrevRefFrameNum is treated as 0, so no gap can be
   * detected against the previous stream state */
  if (priv->current_slice.nalu.idr_pic_flag)
    priv->prev_ref_frame_num = 0;

  ret = gst_h264_decoder_handle_frame_num_gap (self, frame_num);
  if (ret != GST_FLOW_OK)
    return ret;

  if (!gst_h264_decoder_init_current_picture (self))
    return GST_FLOW_ERROR;

  current_picture = priv->current_picture;

  /* If the new picture is an IDR, flush DPB */
  if (current_picture->idr) {
    if (!current_picture->dec_ref_pic_marking.no_output_of_prior_pics_flag) {
      /* Prior pictures must still be output: drain before dropping them */
      ret = gst_h264_decoder_drain_internal (self);
      if (ret != GST_FLOW_OK)
        return ret;
    } else {
      /* C.4.4 Removal of pictures from the DPB before possible insertion
       * of the current picture
       *
       * If decoded picture is IDR and no_output_of_prior_pics_flag is equal to 1
       * or is inferred to be equal to 1, all frame buffers in the DPB
       * are emptied without output of the pictures they contain,
       * and DPB fullness is set to 0.
       */
      gst_h264_decoder_clear_dpb (self, FALSE);
    }
  }

  gst_h264_decoder_update_pic_nums (self, current_picture, frame_num);

  /* Reference list construction is optional; only done when the subclass
   * requested it (priv->process_ref_pic_lists) */
  if (priv->process_ref_pic_lists)
    gst_h264_decoder_prepare_ref_pic_lists (self, current_picture);

  klass = GST_H264_DECODER_GET_CLASS (self);
  if (klass->start_picture) {
    ret = klass->start_picture (self, priv->current_picture,
        &priv->current_slice, priv->dpb);

    if (ret != GST_FLOW_OK) {
      GST_WARNING_OBJECT (self, "subclass does not want to start picture");
      return ret;
    }
  }

  return GST_FLOW_OK;
}
1115 
1116 static GstH264Picture *
gst_h264_decoder_new_field_picture(GstH264Decoder * self,GstH264Picture * picture)1117 gst_h264_decoder_new_field_picture (GstH264Decoder * self,
1118     GstH264Picture * picture)
1119 {
1120   GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
1121   GstH264Picture *new_picture;
1122 
1123   if (!klass->new_field_picture) {
1124     GST_WARNING_OBJECT (self, "Subclass does not support interlaced stream");
1125     return NULL;
1126   }
1127 
1128   new_picture = gst_h264_picture_new ();
1129   /* don't confuse subclass by non-existing picture */
1130   if (!picture->nonexisting) {
1131     GstFlowReturn ret;
1132 
1133     ret = klass->new_field_picture (self, picture, new_picture);
1134     if (ret != GST_FLOW_OK) {
1135       GST_WARNING_OBJECT (self, "Subclass couldn't handle new field picture");
1136       gst_h264_picture_unref (new_picture);
1137 
1138       return NULL;
1139     }
1140   }
1141 
1142   new_picture->other_field = picture;
1143   new_picture->second_field = TRUE;
1144 
1145   return new_picture;
1146 }
1147 
1148 static gboolean
gst_h264_decoder_find_first_field_picture(GstH264Decoder * self,GstH264Slice * slice,GstH264Picture ** first_field)1149 gst_h264_decoder_find_first_field_picture (GstH264Decoder * self,
1150     GstH264Slice * slice, GstH264Picture ** first_field)
1151 {
1152   GstH264DecoderPrivate *priv = self->priv;
1153   const GstH264SliceHdr *slice_hdr = &slice->header;
1154   GstH264Picture *prev_field;
1155   gboolean in_dpb;
1156 
1157   *first_field = NULL;
1158   prev_field = NULL;
1159   in_dpb = FALSE;
1160   if (gst_h264_dpb_get_interlaced (priv->dpb)) {
1161     if (priv->last_field) {
1162       prev_field = priv->last_field;
1163       in_dpb = FALSE;
1164     } else if (gst_h264_dpb_get_size (priv->dpb) > 0) {
1165       GstH264Picture *prev_picture;
1166       GArray *pictures;
1167 
1168       pictures = gst_h264_dpb_get_pictures_all (priv->dpb);
1169       prev_picture =
1170           g_array_index (pictures, GstH264Picture *, pictures->len - 1);
1171       g_array_unref (pictures); /* prev_picture should be held */
1172 
1173       /* Previous picture was a field picture. */
1174       if (!GST_H264_PICTURE_IS_FRAME (prev_picture)
1175           && !prev_picture->other_field) {
1176         prev_field = prev_picture;
1177         in_dpb = TRUE;
1178       }
1179     }
1180   } else {
1181     g_assert (priv->last_field == NULL);
1182   }
1183 
1184   /* This is not a field picture */
1185   if (!slice_hdr->field_pic_flag) {
1186     if (!prev_field)
1187       return TRUE;
1188 
1189     GST_WARNING_OBJECT (self, "Previous picture %p (poc %d) is not complete",
1190         prev_field, prev_field->pic_order_cnt);
1191     goto error;
1192   }
1193 
1194   /* OK, this is the first field. */
1195   if (!prev_field)
1196     return TRUE;
1197 
1198   if (prev_field->frame_num != slice_hdr->frame_num) {
1199     GST_WARNING_OBJECT (self, "Previous picture %p (poc %d) is not complete",
1200         prev_field, prev_field->pic_order_cnt);
1201     goto error;
1202   } else {
1203     GstH264PictureField current_field = slice_hdr->bottom_field_flag ?
1204         GST_H264_PICTURE_FIELD_BOTTOM_FIELD : GST_H264_PICTURE_FIELD_TOP_FIELD;
1205 
1206     if (current_field == prev_field->field) {
1207       GST_WARNING_OBJECT (self,
1208           "Currnet picture and previous picture have identical field %d",
1209           current_field);
1210       goto error;
1211     }
1212   }
1213 
1214   *first_field = gst_h264_picture_ref (prev_field);
1215   return TRUE;
1216 
1217 error:
1218   if (!in_dpb) {
1219     gst_h264_picture_clear (&priv->last_field);
1220   } else {
1221     /* FIXME: implement fill gap field picture if it is already in DPB */
1222   }
1223 
1224   return FALSE;
1225 }
1226 
1227 static GstFlowReturn
gst_h264_decoder_parse_slice(GstH264Decoder * self,GstH264NalUnit * nalu)1228 gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
1229 {
1230   GstH264DecoderPrivate *priv = self->priv;
1231   GstH264ParserResult pres = GST_H264_PARSER_OK;
1232   GstFlowReturn ret = GST_FLOW_OK;
1233 
1234   memset (&priv->current_slice, 0, sizeof (GstH264Slice));
1235 
1236   pres = gst_h264_parser_parse_slice_hdr (priv->parser, nalu,
1237       &priv->current_slice.header, TRUE, TRUE);
1238 
1239   if (pres != GST_H264_PARSER_OK) {
1240     GST_ERROR_OBJECT (self, "Failed to parse slice header, ret %d", pres);
1241     memset (&priv->current_slice, 0, sizeof (GstH264Slice));
1242 
1243     return GST_FLOW_ERROR;
1244   }
1245 
1246   priv->current_slice.nalu = *nalu;
1247 
1248   if (!gst_h264_decoder_preprocess_slice (self, &priv->current_slice))
1249     return GST_FLOW_ERROR;
1250 
1251   priv->active_pps = priv->current_slice.header.pps;
1252   priv->active_sps = priv->active_pps->sequence;
1253 
1254   /* Check whether field picture boundary within given codec frame.
1255    * This might happen in case that upstream sent buffer per frame unit,
1256    * not picture unit (i.e., AU unit).
1257    * If AU boundary is detected, then finish first field picture we decoded
1258    * in this chain, we should finish the current picture and
1259    * start new field picture decoding */
1260   if (gst_h264_dpb_get_interlaced (priv->dpb) && priv->current_picture &&
1261       !GST_H264_PICTURE_IS_FRAME (priv->current_picture) &&
1262       !priv->current_picture->second_field) {
1263     GstH264PictureField prev_field = priv->current_picture->field;
1264     GstH264PictureField cur_field = GST_H264_PICTURE_FIELD_FRAME;
1265     if (priv->current_slice.header.field_pic_flag)
1266       cur_field = priv->current_slice.header.bottom_field_flag ?
1267           GST_H264_PICTURE_FIELD_BOTTOM_FIELD :
1268           GST_H264_PICTURE_FIELD_TOP_FIELD;
1269 
1270     if (cur_field != prev_field) {
1271       GST_LOG_OBJECT (self,
1272           "Found new field picture, finishing the first field picture");
1273       gst_h264_decoder_finish_current_picture (self, &ret);
1274     }
1275   }
1276 
1277   if (!priv->current_picture) {
1278     GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
1279     GstH264Picture *picture = NULL;
1280     GstH264Picture *first_field = NULL;
1281     GstFlowReturn ret = GST_FLOW_OK;
1282 
1283     g_assert (priv->current_frame);
1284 
1285     if (!gst_h264_decoder_find_first_field_picture (self,
1286             &priv->current_slice, &first_field)) {
1287       GST_ERROR_OBJECT (self, "Couldn't find or determine first picture");
1288       return GST_FLOW_ERROR;
1289     }
1290 
1291     if (first_field) {
1292       picture = gst_h264_decoder_new_field_picture (self, first_field);
1293       gst_h264_picture_unref (first_field);
1294 
1295       if (!picture) {
1296         GST_ERROR_OBJECT (self, "Couldn't duplicate the first field picture");
1297         return GST_FLOW_ERROR;
1298       }
1299     } else {
1300       picture = gst_h264_picture_new ();
1301 
1302       if (klass->new_picture)
1303         ret = klass->new_picture (self, priv->current_frame, picture);
1304 
1305       if (ret != GST_FLOW_OK) {
1306         GST_WARNING_OBJECT (self, "subclass does not want accept new picture");
1307         priv->current_picture = NULL;
1308         gst_h264_picture_unref (picture);
1309         return ret;
1310       }
1311     }
1312 
1313     /* This allows accessing the frame from the picture. */
1314     picture->system_frame_number = priv->current_frame->system_frame_number;
1315     priv->current_picture = picture;
1316 
1317     ret = gst_h264_decoder_start_current_picture (self);
1318     if (ret != GST_FLOW_OK) {
1319       GST_WARNING_OBJECT (self, "start picture failed");
1320       return ret;
1321     }
1322   }
1323 
1324   return gst_h264_decoder_decode_slice (self);
1325 }
1326 
1327 static GstFlowReturn
gst_h264_decoder_decode_nal(GstH264Decoder * self,GstH264NalUnit * nalu)1328 gst_h264_decoder_decode_nal (GstH264Decoder * self, GstH264NalUnit * nalu)
1329 {
1330   GstFlowReturn ret = GST_FLOW_OK;
1331 
1332   GST_LOG_OBJECT (self, "Parsed nal type: %d, offset %d, size %d",
1333       nalu->type, nalu->offset, nalu->size);
1334 
1335   switch (nalu->type) {
1336     case GST_H264_NAL_SPS:
1337       ret = gst_h264_decoder_parse_sps (self, nalu);
1338       break;
1339     case GST_H264_NAL_PPS:
1340       ret = gst_h264_decoder_parse_pps (self, nalu);
1341       break;
1342     case GST_H264_NAL_SLICE:
1343     case GST_H264_NAL_SLICE_DPA:
1344     case GST_H264_NAL_SLICE_DPB:
1345     case GST_H264_NAL_SLICE_DPC:
1346     case GST_H264_NAL_SLICE_IDR:
1347     case GST_H264_NAL_SLICE_EXT:
1348       ret = gst_h264_decoder_parse_slice (self, nalu);
1349       break;
1350     default:
1351       break;
1352   }
1353 
1354   return ret;
1355 }
1356 
1357 static void
gst_h264_decoder_format_from_caps(GstH264Decoder * self,GstCaps * caps,GstH264DecoderFormat * format,GstH264DecoderAlign * align)1358 gst_h264_decoder_format_from_caps (GstH264Decoder * self, GstCaps * caps,
1359     GstH264DecoderFormat * format, GstH264DecoderAlign * align)
1360 {
1361   if (format)
1362     *format = GST_H264_DECODER_FORMAT_NONE;
1363 
1364   if (align)
1365     *align = GST_H264_DECODER_ALIGN_NONE;
1366 
1367   if (!gst_caps_is_fixed (caps)) {
1368     GST_WARNING_OBJECT (self, "Caps wasn't fixed");
1369     return;
1370   }
1371 
1372   GST_DEBUG_OBJECT (self, "parsing caps: %" GST_PTR_FORMAT, caps);
1373 
1374   if (caps && gst_caps_get_size (caps) > 0) {
1375     GstStructure *s = gst_caps_get_structure (caps, 0);
1376     const gchar *str = NULL;
1377 
1378     if (format) {
1379       if ((str = gst_structure_get_string (s, "stream-format"))) {
1380         if (strcmp (str, "avc") == 0 || strcmp (str, "avc3") == 0)
1381           *format = GST_H264_DECODER_FORMAT_AVC;
1382         else if (strcmp (str, "byte-stream") == 0)
1383           *format = GST_H264_DECODER_FORMAT_BYTE;
1384       }
1385     }
1386 
1387     if (align) {
1388       if ((str = gst_structure_get_string (s, "alignment"))) {
1389         if (strcmp (str, "au") == 0)
1390           *align = GST_H264_DECODER_ALIGN_AU;
1391         else if (strcmp (str, "nal") == 0)
1392           *align = GST_H264_DECODER_ALIGN_NAL;
1393       }
1394     }
1395   }
1396 }
1397 
/* GstVideoDecoder::set_format implementation.
 *
 * Stores the new input state, probes upstream for liveness (live
 * sources run in low-latency mode), derives the stream-format
 * (avc/byte-stream) and alignment (au/nal) from the caps with
 * fallbacks, caches any codec_data buffer and parses it for SPS/PPS.
 *
 * Always returns TRUE: even a codec_data parse failure is tolerated
 * because valid inband SPS/PPS may still follow.
 */
static gboolean
gst_h264_decoder_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstH264Decoder *self = GST_H264_DECODER (decoder);
  GstH264DecoderPrivate *priv = self->priv;
  GstQuery *query;

  GST_DEBUG_OBJECT (decoder, "Set format");

  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);

  self->input_state = gst_video_codec_state_ref (state);

  /* in case live streaming, we will run on low-latency mode */
  priv->is_live = FALSE;
  query = gst_query_new_latency ();
  if (gst_pad_peer_query (GST_VIDEO_DECODER_SINK_PAD (self), query))
    gst_query_parse_latency (query, &priv->is_live, NULL, NULL);
  gst_query_unref (query);

  if (priv->is_live)
    GST_DEBUG_OBJECT (self, "Live source, will run on low-latency mode");

  if (state->caps) {
    GstStructure *str;
    const GValue *codec_data_value;
    GstH264DecoderFormat format;
    GstH264DecoderAlign align;

    gst_h264_decoder_format_from_caps (self, state->caps, &format, &align);

    str = gst_caps_get_structure (state->caps, 0);
    codec_data_value = gst_structure_get_value (str, "codec_data");

    /* Cache codec_data (or clear a stale one from a previous caps) */
    if (GST_VALUE_HOLDS_BUFFER (codec_data_value)) {
      gst_buffer_replace (&priv->codec_data,
          gst_value_get_buffer (codec_data_value));
    } else {
      gst_buffer_replace (&priv->codec_data, NULL);
    }

    if (format == GST_H264_DECODER_FORMAT_NONE) {
      /* codec_data implies avc */
      if (codec_data_value != NULL) {
        GST_WARNING_OBJECT (self,
            "video/x-h264 caps with codec_data but no stream-format=avc");
        format = GST_H264_DECODER_FORMAT_AVC;
      } else {
        /* otherwise assume bytestream input */
        GST_WARNING_OBJECT (self,
            "video/x-h264 caps without codec_data or stream-format");
        format = GST_H264_DECODER_FORMAT_BYTE;
      }
    }

    if (format == GST_H264_DECODER_FORMAT_AVC) {
      /* AVC requires codec_data, AVC3 might have one and/or SPS/PPS inline */
      if (codec_data_value == NULL) {
        /* Try it with size 4 anyway */
        priv->nal_length_size = 4;
        GST_WARNING_OBJECT (self,
            "avc format without codec data, assuming nal length size is 4");
      }

      /* AVC implies alignment=au */
      if (align == GST_H264_DECODER_ALIGN_NONE)
        align = GST_H264_DECODER_ALIGN_AU;
    }

    if (format == GST_H264_DECODER_FORMAT_BYTE) {
      /* Unusual but not fatal; codec_data is simply unused */
      if (codec_data_value != NULL) {
        GST_WARNING_OBJECT (self, "bytestream with codec data");
      }
    }

    priv->in_format = format;
    priv->align = align;
  }

  if (priv->codec_data) {
    GstMapInfo map;

    gst_buffer_map (priv->codec_data, &map, GST_MAP_READ);
    if (gst_h264_decoder_parse_codec_data (self, map.data, map.size) !=
        GST_FLOW_OK) {
      /* keep going without error.
       * Probably inband SPS/PPS might be valid data */
      GST_WARNING_OBJECT (self, "Failed to handle codec data");
    }
    gst_buffer_unmap (priv->codec_data, &map);
  }

  return TRUE;
}
1494 
/* Populate @picture from the parsed slice header: IDR state, reference
 * marking data, field/frame type, reference status, frame_num/pic_num
 * (spec 7.4.3) and the POC-type-specific syntax elements.
 *
 * Returns FALSE when the required PPS/SPS is missing or the stream is
 * interlaced but the subclass lacks the new_field_picture vfunc.
 */
static gboolean
gst_h264_decoder_fill_picture_from_slice (GstH264Decoder * self,
    const GstH264Slice * slice, GstH264Picture * picture)
{
  GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
  const GstH264SliceHdr *slice_hdr = &slice->header;
  const GstH264PPS *pps;
  const GstH264SPS *sps;

  pps = slice_hdr->pps;
  if (!pps) {
    GST_ERROR_OBJECT (self, "No pps in slice header");
    return FALSE;
  }

  sps = pps->sequence;
  if (!sps) {
    GST_ERROR_OBJECT (self, "No sps in pps");
    return FALSE;
  }

  picture->idr = slice->nalu.idr_pic_flag;
  picture->dec_ref_pic_marking = slice_hdr->dec_ref_pic_marking;
  if (picture->idr)
    picture->idr_pic_id = slice_hdr->idr_pic_id;

  /* Field vs frame coding of this picture */
  if (slice_hdr->field_pic_flag)
    picture->field =
        slice_hdr->bottom_field_flag ?
        GST_H264_PICTURE_FIELD_BOTTOM_FIELD : GST_H264_PICTURE_FIELD_TOP_FIELD;
  else
    picture->field = GST_H264_PICTURE_FIELD_FRAME;

  if (!GST_H264_PICTURE_IS_FRAME (picture) && !klass->new_field_picture) {
    GST_FIXME_OBJECT (self, "Subclass doesn't support interlace stream");
    return FALSE;
  }

  /* nal_ref_idc != 0 means this picture is used for reference */
  picture->nal_ref_idc = slice->nalu.ref_idc;
  if (slice->nalu.ref_idc != 0)
    gst_h264_picture_set_reference (picture,
        GST_H264_PICTURE_REF_SHORT_TERM, FALSE);

  picture->frame_num = slice_hdr->frame_num;

  /* 7.4.3 */
  if (!slice_hdr->field_pic_flag)
    picture->pic_num = slice_hdr->frame_num;
  else
    picture->pic_num = 2 * slice_hdr->frame_num + 1;

  /* Copy the syntax elements needed by the POC derivation for each
   * pic_order_cnt_type (see gst_h264_decoder_calculate_poc) */
  picture->pic_order_cnt_type = sps->pic_order_cnt_type;
  switch (picture->pic_order_cnt_type) {
    case 0:
      picture->pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb;
      picture->delta_pic_order_cnt_bottom =
          slice_hdr->delta_pic_order_cnt_bottom;
      break;
    case 1:
      picture->delta_pic_order_cnt0 = slice_hdr->delta_pic_order_cnt[0];
      picture->delta_pic_order_cnt1 = slice_hdr->delta_pic_order_cnt[1];
      break;
    case 2:
      break;
    default:
      g_assert_not_reached ();
      return FALSE;
  }

  return TRUE;
}
1566 
/* Derive the picture order count of @picture per spec 8.2.1: computes
 * top_field_order_cnt / bottom_field_order_cnt according to the SPS
 * pic_order_cnt_type (8.2.1.1, 8.2.1.2 or 8.2.1.3), then sets
 * pic_order_cnt from the field type.
 *
 * Relies on the priv->prev_* decoding state maintained across pictures.
 * Returns FALSE when no SPS is active or the stream is invalid.
 */
static gboolean
gst_h264_decoder_calculate_poc (GstH264Decoder * self, GstH264Picture * picture)
{
  GstH264DecoderPrivate *priv = self->priv;
  const GstH264SPS *sps = priv->active_sps;

  if (!sps) {
    GST_ERROR_OBJECT (self, "No active SPS");
    return FALSE;
  }

  switch (picture->pic_order_cnt_type) {
    case 0:{
      /* See spec 8.2.1.1 */
      gint prev_pic_order_cnt_msb, prev_pic_order_cnt_lsb;
      gint max_pic_order_cnt_lsb;

      if (picture->idr) {
        prev_pic_order_cnt_msb = prev_pic_order_cnt_lsb = 0;
      } else {
        /* memory_management_control_operation equal to 5 resets the POC
         * reference point */
        if (priv->prev_ref_has_memmgmnt5) {
          if (priv->prev_ref_field != GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
            prev_pic_order_cnt_msb = 0;
            prev_pic_order_cnt_lsb = priv->prev_ref_top_field_order_cnt;
          } else {
            prev_pic_order_cnt_msb = 0;
            prev_pic_order_cnt_lsb = 0;
          }
        } else {
          prev_pic_order_cnt_msb = priv->prev_ref_pic_order_cnt_msb;
          prev_pic_order_cnt_lsb = priv->prev_ref_pic_order_cnt_lsb;
        }
      }

      max_pic_order_cnt_lsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);

      /* 8-3: detect lsb wrap-around and adjust the msb accordingly */
      if ((picture->pic_order_cnt_lsb < prev_pic_order_cnt_lsb) &&
          (prev_pic_order_cnt_lsb - picture->pic_order_cnt_lsb >=
              max_pic_order_cnt_lsb / 2)) {
        picture->pic_order_cnt_msb =
            prev_pic_order_cnt_msb + max_pic_order_cnt_lsb;
      } else if ((picture->pic_order_cnt_lsb > prev_pic_order_cnt_lsb)
          && (picture->pic_order_cnt_lsb - prev_pic_order_cnt_lsb >
              max_pic_order_cnt_lsb / 2)) {
        picture->pic_order_cnt_msb =
            prev_pic_order_cnt_msb - max_pic_order_cnt_lsb;
      } else {
        picture->pic_order_cnt_msb = prev_pic_order_cnt_msb;
      }

      /* NOTE(review): this assignment looks redundant — the switch just
       * below assigns the same value for the FRAME and TOP_FIELD cases;
       * confirm before removing */
      if (picture->field != GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
        picture->top_field_order_cnt =
            picture->pic_order_cnt_msb + picture->pic_order_cnt_lsb;
      }

      switch (picture->field) {
        case GST_H264_PICTURE_FIELD_FRAME:
          picture->top_field_order_cnt = picture->pic_order_cnt_msb +
              picture->pic_order_cnt_lsb;
          picture->bottom_field_order_cnt = picture->top_field_order_cnt +
              picture->delta_pic_order_cnt_bottom;
          break;
        case GST_H264_PICTURE_FIELD_TOP_FIELD:
          picture->top_field_order_cnt = picture->pic_order_cnt_msb +
              picture->pic_order_cnt_lsb;
          break;
        case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
          picture->bottom_field_order_cnt = picture->pic_order_cnt_msb +
              picture->pic_order_cnt_lsb;
          break;
      }
      break;
    }

    case 1:{
      gint abs_frame_num = 0;
      gint expected_pic_order_cnt = 0;
      gint i;

      /* See spec 8.2.1.2 */
      if (priv->prev_has_memmgmnt5)
        priv->prev_frame_num_offset = 0;

      /* 8-6: derive FrameNumOffset, accounting for frame_num wrap */
      if (picture->idr)
        picture->frame_num_offset = 0;
      else if (priv->prev_frame_num > picture->frame_num)
        picture->frame_num_offset =
            priv->prev_frame_num_offset + priv->max_frame_num;
      else
        picture->frame_num_offset = priv->prev_frame_num_offset;

      /* 8-7: absFrameNum */
      if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
        abs_frame_num = picture->frame_num_offset + picture->frame_num;
      else
        abs_frame_num = 0;

      if (picture->nal_ref_idc == 0 && abs_frame_num > 0)
        --abs_frame_num;

      if (abs_frame_num > 0) {
        gint pic_order_cnt_cycle_cnt, frame_num_in_pic_order_cnt_cycle;
        gint expected_delta_per_pic_order_cnt_cycle = 0;

        if (sps->num_ref_frames_in_pic_order_cnt_cycle == 0) {
          GST_WARNING_OBJECT (self,
              "Invalid num_ref_frames_in_pic_order_cnt_cycle in stream");
          return FALSE;
        }

        pic_order_cnt_cycle_cnt =
            (abs_frame_num - 1) / sps->num_ref_frames_in_pic_order_cnt_cycle;
        frame_num_in_pic_order_cnt_cycle =
            (abs_frame_num - 1) % sps->num_ref_frames_in_pic_order_cnt_cycle;

        for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++) {
          expected_delta_per_pic_order_cnt_cycle +=
              sps->offset_for_ref_frame[i];
        }

        expected_pic_order_cnt = pic_order_cnt_cycle_cnt *
            expected_delta_per_pic_order_cnt_cycle;
        /* frame_num_in_pic_order_cnt_cycle is verified < 255 in parser */
        for (i = 0; i <= frame_num_in_pic_order_cnt_cycle; ++i)
          expected_pic_order_cnt += sps->offset_for_ref_frame[i];
      }

      if (!picture->nal_ref_idc)
        expected_pic_order_cnt += sps->offset_for_non_ref_pic;

      /* 8-10/8-11: map expectedPicOrderCnt onto the coded field(s) */
      if (GST_H264_PICTURE_IS_FRAME (picture)) {
        picture->top_field_order_cnt =
            expected_pic_order_cnt + picture->delta_pic_order_cnt0;
        picture->bottom_field_order_cnt = picture->top_field_order_cnt +
            sps->offset_for_top_to_bottom_field + picture->delta_pic_order_cnt1;
      } else if (picture->field != GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
        picture->top_field_order_cnt =
            expected_pic_order_cnt + picture->delta_pic_order_cnt0;
      } else {
        picture->bottom_field_order_cnt = expected_pic_order_cnt +
            sps->offset_for_top_to_bottom_field + picture->delta_pic_order_cnt0;
      }
      break;
    }

    case 2:{
      gint temp_pic_order_cnt;

      /* See spec 8.2.1.3 */
      if (priv->prev_has_memmgmnt5)
        priv->prev_frame_num_offset = 0;

      /* 8-12: FrameNumOffset, accounting for frame_num wrap */
      if (picture->idr)
        picture->frame_num_offset = 0;
      else if (priv->prev_frame_num > picture->frame_num)
        picture->frame_num_offset =
            priv->prev_frame_num_offset + priv->max_frame_num;
      else
        picture->frame_num_offset = priv->prev_frame_num_offset;

      /* 8-13: non-reference pictures get an odd (smaller) count */
      if (picture->idr) {
        temp_pic_order_cnt = 0;
      } else if (!picture->nal_ref_idc) {
        temp_pic_order_cnt =
            2 * (picture->frame_num_offset + picture->frame_num) - 1;
      } else {
        temp_pic_order_cnt =
            2 * (picture->frame_num_offset + picture->frame_num);
      }

      if (GST_H264_PICTURE_IS_FRAME (picture)) {
        picture->top_field_order_cnt = temp_pic_order_cnt;
        picture->bottom_field_order_cnt = temp_pic_order_cnt;
      } else if (picture->field == GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
        picture->bottom_field_order_cnt = temp_pic_order_cnt;
      } else {
        picture->top_field_order_cnt = temp_pic_order_cnt;
      }
      break;
    }

    default:
      GST_WARNING_OBJECT (self,
          "Invalid pic_order_cnt_type: %d", sps->pic_order_cnt_type);
      return FALSE;
  }

  /* 8-1: PicOrderCnt of the picture itself */
  switch (picture->field) {
    case GST_H264_PICTURE_FIELD_FRAME:
      picture->pic_order_cnt =
          MIN (picture->top_field_order_cnt, picture->bottom_field_order_cnt);
      break;
    case GST_H264_PICTURE_FIELD_TOP_FIELD:
      picture->pic_order_cnt = picture->top_field_order_cnt;
      break;
    case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
      picture->pic_order_cnt = picture->bottom_field_order_cnt;
      break;
    default:
      g_assert_not_reached ();
      return FALSE;
  }

  return TRUE;
}
1771 
1772 static void
gst_h264_decoder_drain_output_queue(GstH264Decoder * self,guint num,GstFlowReturn * ret)1773 gst_h264_decoder_drain_output_queue (GstH264Decoder * self, guint num,
1774     GstFlowReturn * ret)
1775 {
1776   GstH264DecoderPrivate *priv = self->priv;
1777   GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
1778 
1779   g_assert (klass->output_picture);
1780   g_assert (ret != NULL);
1781 
1782   while (gst_queue_array_get_length (priv->output_queue) > num) {
1783     GstH264DecoderOutputFrame *output_frame = (GstH264DecoderOutputFrame *)
1784         gst_queue_array_pop_head_struct (priv->output_queue);
1785     GstFlowReturn flow_ret = klass->output_picture (self, output_frame->frame,
1786         output_frame->picture);
1787 
1788     UPDATE_FLOW_RETURN (ret, flow_ret);
1789   }
1790 }
1791 
/* Queue @picture for output via the subclass' output_picture() vfunc,
 * then drain the output queue down to the subclass' preferred output
 * delay. Looks up the codec frame by picture->system_frame_number; on
 * lookup failure the picture is released and @ret is set to
 * GST_FLOW_ERROR.
 *
 * @ret: (inout): accumulated flow return, updated via UPDATE_FLOW_RETURN */
static void
gst_h264_decoder_do_output_picture (GstH264Decoder * self,
    GstH264Picture * picture, GstFlowReturn * ret)
{
  GstH264DecoderPrivate *priv = self->priv;
  GstVideoCodecFrame *frame = NULL;
  GstH264DecoderOutputFrame output_frame;
  GstFlowReturn flow_ret = GST_FLOW_OK;

  g_assert (ret != NULL);

  GST_LOG_OBJECT (self, "Outputting picture %p (frame_num %d, poc %d)",
      picture, picture->frame_num, picture->pic_order_cnt);

  /* Output is expected in increasing POC order; a regression indicates a
   * broken stream but output proceeds anyway */
  if (picture->pic_order_cnt < priv->last_output_poc) {
    GST_WARNING_OBJECT (self,
        "Outputting out of order %d -> %d, likely a broken stream",
        priv->last_output_poc, picture->pic_order_cnt);
  }

  priv->last_output_poc = picture->pic_order_cnt;

  frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
      picture->system_frame_number);

  if (!frame) {
    /* Error path: drop our reference on the picture before bailing out */
    GST_ERROR_OBJECT (self,
        "No available codec frame with frame number %d",
        picture->system_frame_number);
    UPDATE_FLOW_RETURN (ret, GST_FLOW_ERROR);

    gst_h264_picture_unref (picture);

    return;
  }

  /* NOTE(review): frame/picture ownership appears to transfer into the
   * queued entry consumed by output_picture() — confirm against
   * gst_h264_decoder_drain_output_queue() callers */
  output_frame.frame = frame;
  output_frame.picture = picture;
  output_frame.self = self;
  gst_queue_array_push_tail_struct (priv->output_queue, &output_frame);

  gst_h264_decoder_drain_output_queue (self, priv->preferred_output_delay,
      &flow_ret);
  UPDATE_FLOW_RETURN (ret, flow_ret);
}
1837 
/* Finalize decoding of priv->current_picture: invoke the subclass'
 * end_picture() vfunc, clear the per-frame reference lists and hand the
 * picture over to gst_h264_decoder_finish_picture(). No-op when there is
 * no current picture.
 *
 * @ret: (inout): accumulated flow return, updated via UPDATE_FLOW_RETURN */
static void
gst_h264_decoder_finish_current_picture (GstH264Decoder * self,
    GstFlowReturn * ret)
{
  GstH264DecoderPrivate *priv = self->priv;
  GstH264DecoderClass *klass;
  GstFlowReturn flow_ret = GST_FLOW_OK;

  if (!priv->current_picture)
    return;

  klass = GST_H264_DECODER_GET_CLASS (self);

  if (klass->end_picture) {
    flow_ret = klass->end_picture (self, priv->current_picture);
    if (flow_ret != GST_FLOW_OK) {
      /* Keep decoding, but neutralize the failed picture so it is never
       * emitted downstream */
      GST_WARNING_OBJECT (self,
          "end picture failed, marking picture %p non-existing "
          "(frame_num %d, poc %d)", priv->current_picture,
          priv->current_picture->frame_num,
          priv->current_picture->pic_order_cnt);
      priv->current_picture->nonexisting = TRUE;

      /* this fake nonexisting picture will not trigger output_picture() */
      gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self),
          gst_video_codec_frame_ref (priv->current_frame));
    }
  }

  /* We no longer need the per frame reference lists */
  gst_h264_decoder_clear_ref_pic_lists (self);

  /* finish picture takes ownership of the picture */
  gst_h264_decoder_finish_picture (self, priv->current_picture, &flow_ret);
  priv->current_picture = NULL;

  UPDATE_FLOW_RETURN (ret, flow_ret);
}
1876 
1877 static gint
poc_asc_compare(const GstH264Picture ** a,const GstH264Picture ** b)1878 poc_asc_compare (const GstH264Picture ** a, const GstH264Picture ** b)
1879 {
1880   return (*a)->pic_order_cnt - (*b)->pic_order_cnt;
1881 }
1882 
1883 static gint
poc_desc_compare(const GstH264Picture ** a,const GstH264Picture ** b)1884 poc_desc_compare (const GstH264Picture ** a, const GstH264Picture ** b)
1885 {
1886   return (*b)->pic_order_cnt - (*a)->pic_order_cnt;
1887 }
1888 
1889 static GstFlowReturn
gst_h264_decoder_drain_internal(GstH264Decoder * self)1890 gst_h264_decoder_drain_internal (GstH264Decoder * self)
1891 {
1892   GstH264DecoderPrivate *priv = self->priv;
1893   GstH264Picture *picture;
1894   GstFlowReturn ret = GST_FLOW_OK;
1895 
1896   while ((picture = gst_h264_dpb_bump (priv->dpb, TRUE)) != NULL) {
1897     gst_h264_decoder_do_output_picture (self, picture, &ret);
1898   }
1899 
1900   gst_h264_decoder_drain_output_queue (self, 0, &ret);
1901 
1902   gst_h264_picture_clear (&priv->last_field);
1903   gst_h264_dpb_clear (priv->dpb);
1904   priv->last_output_poc = G_MININT32;
1905 
1906   return ret;
1907 }
1908 
1909 static gboolean
gst_h264_decoder_handle_memory_management_opt(GstH264Decoder * self,GstH264Picture * picture)1910 gst_h264_decoder_handle_memory_management_opt (GstH264Decoder * self,
1911     GstH264Picture * picture)
1912 {
1913   GstH264DecoderPrivate *priv = self->priv;
1914   gint i;
1915 
1916   for (i = 0; i < G_N_ELEMENTS (picture->dec_ref_pic_marking.ref_pic_marking);
1917       i++) {
1918     GstH264RefPicMarking *ref_pic_marking =
1919         &picture->dec_ref_pic_marking.ref_pic_marking[i];
1920     guint8 type = ref_pic_marking->memory_management_control_operation;
1921 
1922     GST_TRACE_OBJECT (self, "memory management operation %d, type %d", i, type);
1923 
1924     /* Normal end of operations' specification */
1925     if (type == 0)
1926       return TRUE;
1927 
1928     switch (type) {
1929       case 4:
1930         priv->max_long_term_frame_idx =
1931             ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
1932         break;
1933       case 5:
1934         priv->max_long_term_frame_idx = -1;
1935         break;
1936       default:
1937         break;
1938     }
1939 
1940     if (!gst_h264_dpb_perform_memory_management_control_operation (priv->dpb,
1941             ref_pic_marking, picture)) {
1942       GST_WARNING_OBJECT (self, "memory management operation type %d failed",
1943           type);
1944       /* Most likely our implementation fault, but let's just perform
1945        * next MMCO if any */
1946     }
1947   }
1948 
1949   return TRUE;
1950 }
1951 
/* Sliding window decoded reference picture marking (spec 8.2.5.3):
 * when the DPB already holds the maximum number of reference frames,
 * unmark short-term reference pictures (lowest frame_num_wrap first)
 * until the count drops below the limit.
 *
 * Returns: FALSE when there is no active SPS or no short-term reference
 * picture can be found to unmark, TRUE otherwise. */
static gboolean
gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self,
    GstH264Picture * picture)
{
  GstH264DecoderPrivate *priv = self->priv;
  const GstH264SPS *sps = priv->active_sps;
  gint num_ref_pics;
  gint max_num_ref_frames;

  /* Skip this for the second field */
  if (picture->second_field)
    return TRUE;

  if (!sps) {
    GST_ERROR_OBJECT (self, "No active sps");
    return FALSE;
  }

  /* 8.2.5.3. Ensure the DPB doesn't overflow by discarding the oldest picture */
  num_ref_pics = gst_h264_dpb_num_ref_frames (priv->dpb);
  max_num_ref_frames = MAX (1, sps->num_ref_frames);

  if (num_ref_pics < max_num_ref_frames)
    return TRUE;

  /* In theory, num_ref_pics shouldn't be larger than max_num_ref_frames
   * but it could happen if our implementation is wrong somehow or so.
   * Just try to remove reference pictures as many as possible in order to
   * avoid DPB overflow.
   */
  while (num_ref_pics >= max_num_ref_frames) {
    /* Max number of reference pics reached, need to remove one of the short
     * term ones. Find smallest frame_num_wrap short reference picture and mark
     * it as unused */
    GstH264Picture *to_unmark =
        gst_h264_dpb_get_lowest_frame_num_short_ref (priv->dpb);

    if (num_ref_pics > max_num_ref_frames) {
      GST_WARNING_OBJECT (self,
          "num_ref_pics %d is larger than allowed maximum %d",
          num_ref_pics, max_num_ref_frames);
    }

    if (!to_unmark) {
      GST_WARNING_OBJECT (self, "Could not find a short ref picture to unmark");
      return FALSE;
    }

    GST_TRACE_OBJECT (self,
        "Unmark reference flag of picture %p (frame_num %d, poc %d)",
        to_unmark, to_unmark->frame_num, to_unmark->pic_order_cnt);

    /* Drop the reference obtained from the lookup above after unmarking */
    gst_h264_picture_set_reference (to_unmark, GST_H264_PICTURE_REF_NONE, TRUE);
    gst_h264_picture_unref (to_unmark);

    num_ref_pics--;
  }

  return TRUE;
}
2012 
2013 /* This method ensures that DPB does not overflow, either by removing
2014  * reference pictures as specified in the stream, or using a sliding window
2015  * procedure to remove the oldest one.
2016  * It also performs marking and unmarking pictures as reference.
 * See spec 8.2.5.1 */
2018 static gboolean
gst_h264_decoder_reference_picture_marking(GstH264Decoder * self,GstH264Picture * picture)2019 gst_h264_decoder_reference_picture_marking (GstH264Decoder * self,
2020     GstH264Picture * picture)
2021 {
2022   GstH264DecoderPrivate *priv = self->priv;
2023 
2024   /* If the current picture is an IDR, all reference pictures are unmarked */
2025   if (picture->idr) {
2026     gst_h264_dpb_mark_all_non_ref (priv->dpb);
2027 
2028     if (picture->dec_ref_pic_marking.long_term_reference_flag) {
2029       gst_h264_picture_set_reference (picture,
2030           GST_H264_PICTURE_REF_LONG_TERM, FALSE);
2031       picture->long_term_frame_idx = 0;
2032       priv->max_long_term_frame_idx = 0;
2033     } else {
2034       gst_h264_picture_set_reference (picture,
2035           GST_H264_PICTURE_REF_SHORT_TERM, FALSE);
2036       priv->max_long_term_frame_idx = -1;
2037     }
2038 
2039     return TRUE;
2040   }
2041 
2042   /* Not an IDR. If the stream contains instructions on how to discard pictures
2043    * from DPB and how to mark/unmark existing reference pictures, do so.
2044    * Otherwise, fall back to default sliding window process */
2045   if (picture->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
2046     if (picture->nonexisting) {
2047       GST_WARNING_OBJECT (self,
2048           "Invalid memory management operation for non-existing picture "
2049           "%p (frame_num %d, poc %d", picture, picture->frame_num,
2050           picture->pic_order_cnt);
2051     }
2052 
2053     return gst_h264_decoder_handle_memory_management_opt (self, picture);
2054   }
2055 
2056   return gst_h264_decoder_sliding_window_picture_marking (self, picture);
2057 }
2058 
2059 static GstH264DpbBumpMode
get_bump_level(GstH264Decoder * self)2060 get_bump_level (GstH264Decoder * self)
2061 {
2062   GstH264DecoderPrivate *priv = self->priv;
2063 
2064   /* User set the mode explicitly. */
2065   switch (priv->compliance) {
2066     case GST_H264_DECODER_COMPLIANCE_STRICT:
2067       return GST_H264_DPB_BUMP_NORMAL_LATENCY;
2068     case GST_H264_DECODER_COMPLIANCE_NORMAL:
2069       return GST_H264_DPB_BUMP_LOW_LATENCY;
2070     case GST_H264_DECODER_COMPLIANCE_FLEXIBLE:
2071       return GST_H264_DPB_BUMP_VERY_LOW_LATENCY;
2072     default:
2073       break;
2074   }
2075 
2076   /* GST_H264_DECODER_COMPLIANCE_AUTO case. */
2077 
2078   if (priv->is_live) {
2079     /* The baseline and constrained-baseline profiles do not have B frames
2080        and do not use the picture reorder, safe to use the higher bump level. */
2081     if (priv->profile_idc == GST_H264_PROFILE_BASELINE)
2082       return GST_H264_DPB_BUMP_VERY_LOW_LATENCY;
2083 
2084     return GST_H264_DPB_BUMP_LOW_LATENCY;
2085   }
2086 
2087   return GST_H264_DPB_BUMP_NORMAL_LATENCY;
2088 }
2089 
/* Finish decoding of @picture (takes ownership): run reference picture
 * marking, prune the DPB, drain on memory management op 5, bump pictures
 * for output as dictated by the bump level, and finally either store the
 * picture in the DPB or output it directly (spec C.4.4, C.4.5.1, C.4.5.2).
 *
 * @ret: (inout): accumulated flow return, updated via UPDATE_FLOW_RETURN */
static void
gst_h264_decoder_finish_picture (GstH264Decoder * self,
    GstH264Picture * picture, GstFlowReturn * ret)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
  GstH264DecoderPrivate *priv = self->priv;
  GstH264DpbBumpMode bump_level = get_bump_level (self);

  /* Finish processing the picture.
   * Start by storing previous picture data for later use */
  if (picture->ref) {
    gst_h264_decoder_reference_picture_marking (self, picture);
    priv->prev_ref_has_memmgmnt5 = picture->mem_mgmt_5;
    priv->prev_ref_top_field_order_cnt = picture->top_field_order_cnt;
    priv->prev_ref_pic_order_cnt_msb = picture->pic_order_cnt_msb;
    priv->prev_ref_pic_order_cnt_lsb = picture->pic_order_cnt_lsb;
    priv->prev_ref_field = picture->field;
    priv->prev_ref_frame_num = picture->frame_num;
  }

  priv->prev_frame_num = picture->frame_num;
  priv->prev_has_memmgmnt5 = picture->mem_mgmt_5;
  priv->prev_frame_num_offset = picture->frame_num_offset;

  /* Remove unused (for reference or later output) pictures from DPB, marking
   * them as such */
  gst_h264_dpb_delete_unused (priv->dpb);

  /* If field pictures belong to different codec frame,
   * drop codec frame of the second field because we are consuming
   * only the first codec frame via GstH264Decoder::output_picture() method */
  if (picture->second_field && picture->other_field &&
      picture->system_frame_number !=
      picture->other_field->system_frame_number) {
    GstVideoCodecFrame *frame = gst_video_decoder_get_frame (decoder,
        picture->system_frame_number);

    gst_video_decoder_release_frame (decoder, frame);
  }

  /* C.4.4 */
  if (picture->mem_mgmt_5) {
    GstFlowReturn drain_ret;

    GST_TRACE_OBJECT (self, "Memory management type 5, drain the DPB");

    drain_ret = gst_h264_decoder_drain_internal (self);
    UPDATE_FLOW_RETURN (ret, drain_ret);
  }

  _bump_dpb (self, bump_level, picture, ret);

  /* Add a ref to avoid the case of directly output and destroyed. */
  gst_h264_picture_ref (picture);

  /* C.4.5.1, C.4.5.2
     - If the current decoded picture is the second field of a complementary
     reference field pair, add to DPB.
     C.4.5.1
     For A reference decoded picture, the "bumping" process is invoked
     repeatedly until there is an empty frame buffer, then add to DPB:
     C.4.5.2
     For a non-reference decoded picture, if there is empty frame buffer
     after bumping the smaller POC, add to DPB.
     Otherwise, output directly. */
  if ((picture->second_field && picture->other_field
          && picture->other_field->ref)
      || picture->ref || gst_h264_dpb_has_empty_frame_buffer (priv->dpb)) {
    /* Split frame into top/bottom field pictures for reference picture marking
     * process. Even if current picture has field_pic_flag equal to zero,
     * if next picture is a field picture, complementary field pair of reference
     * frame should have individual pic_num and long_term_pic_num.
     */
    if (gst_h264_dpb_get_interlaced (priv->dpb) &&
        GST_H264_PICTURE_IS_FRAME (picture)) {
      GstH264Picture *other_field =
          gst_h264_decoder_split_frame (self, picture);

      add_picture_to_dpb (self, picture);
      if (!other_field) {
        GST_WARNING_OBJECT (self,
            "Couldn't split frame into complementary field pair");
        /* Keep decoding anyway... */
      } else {
        add_picture_to_dpb (self, other_field);
      }
    } else {
      add_picture_to_dpb (self, picture);
    }
  } else {
    output_picture_directly (self, picture, ret);
  }

  GST_LOG_OBJECT (self,
      "Finishing picture %p (frame_num %d, poc %d), entries in DPB %d",
      picture, picture->frame_num, picture->pic_order_cnt,
      gst_h264_dpb_get_size (priv->dpb));

  /* Drop the extra reference taken above */
  gst_h264_picture_unref (picture);

  /* For the live mode, we try to bump here to avoid waiting
     for another decoding circle. */
  if (priv->is_live && priv->compliance != GST_H264_DECODER_COMPLIANCE_STRICT)
    _bump_dpb (self, bump_level, NULL, ret);
}
2195 
2196 static gboolean
gst_h264_decoder_update_max_num_reorder_frames(GstH264Decoder * self,GstH264SPS * sps)2197 gst_h264_decoder_update_max_num_reorder_frames (GstH264Decoder * self,
2198     GstH264SPS * sps)
2199 {
2200   GstH264DecoderPrivate *priv = self->priv;
2201   gsize max_num_reorder_frames = 0;
2202 
2203   if (sps->vui_parameters_present_flag
2204       && sps->vui_parameters.bitstream_restriction_flag) {
2205     max_num_reorder_frames = sps->vui_parameters.num_reorder_frames;
2206     if (max_num_reorder_frames > gst_h264_dpb_get_max_num_frames (priv->dpb)) {
2207       GST_WARNING
2208           ("max_num_reorder_frames present, but larger than MaxDpbFrames (%d > %d)",
2209           (gint) max_num_reorder_frames,
2210           gst_h264_dpb_get_max_num_frames (priv->dpb));
2211 
2212       max_num_reorder_frames = 0;
2213       return FALSE;
2214     }
2215 
2216     gst_h264_dpb_set_max_num_reorder_frames (priv->dpb, max_num_reorder_frames);
2217 
2218     return TRUE;
2219   }
2220 
2221   if (priv->compliance == GST_H264_DECODER_COMPLIANCE_STRICT) {
2222     gst_h264_dpb_set_max_num_reorder_frames (priv->dpb,
2223         gst_h264_dpb_get_max_num_frames (priv->dpb));
2224     return TRUE;
2225   }
2226 
2227   /* max_num_reorder_frames not present, infer it from profile/constraints. */
2228   if (sps->profile_idc == 66 || sps->profile_idc == 83) {
2229     /* baseline, constrained baseline and scalable-baseline profiles
2230        only contain I/P frames. */
2231     max_num_reorder_frames = 0;
2232   } else if (sps->constraint_set3_flag) {
2233     /* constraint_set3_flag may mean the -intra only profile. */
2234     switch (sps->profile_idc) {
2235       case 44:
2236       case 86:
2237       case 100:
2238       case 110:
2239       case 122:
2240       case 244:
2241         max_num_reorder_frames = 0;
2242         break;
2243       default:
2244         max_num_reorder_frames = gst_h264_dpb_get_max_num_frames (priv->dpb);
2245         break;
2246     }
2247   } else {
2248     max_num_reorder_frames = gst_h264_dpb_get_max_num_frames (priv->dpb);
2249   }
2250 
2251   gst_h264_dpb_set_max_num_reorder_frames (priv->dpb, max_num_reorder_frames);
2252 
2253   return TRUE;
2254 }
2255 
/* H.264 level indicators, numerically equal to level_idc (level X.Y maps
 * to 10*X + Y). Level 1b has no distinct level_idc; this file maps it to
 * 9 when level_idc == 11 with constraint_set3_flag set (see
 * gst_h264_decoder_process_sps). */
typedef enum
{
  GST_H264_LEVEL_L1 = 10,
  GST_H264_LEVEL_L1B = 9,
  GST_H264_LEVEL_L1_1 = 11,
  GST_H264_LEVEL_L1_2 = 12,
  GST_H264_LEVEL_L1_3 = 13,
  GST_H264_LEVEL_L2_0 = 20,
  GST_H264_LEVEL_L2_1 = 21,
  GST_H264_LEVEL_L2_2 = 22,
  GST_H264_LEVEL_L3 = 30,
  GST_H264_LEVEL_L3_1 = 31,
  GST_H264_LEVEL_L3_2 = 32,
  GST_H264_LEVEL_L4 = 40,
  GST_H264_LEVEL_L4_1 = 41,
  GST_H264_LEVEL_L4_2 = 42,
  GST_H264_LEVEL_L5 = 50,
  GST_H264_LEVEL_L5_1 = 51,
  GST_H264_LEVEL_L5_2 = 52,
  GST_H264_LEVEL_L6 = 60,
  GST_H264_LEVEL_L6_1 = 61,
  GST_H264_LEVEL_L6_2 = 62,
} GstH264DecoderLevel;
2279 
/* Subset of the per-level limits this decoder needs; field names follow
 * the spec's Table A-1 variables (MaxMBPS, MaxFS, MaxDpbMbs, MaxBR). */
typedef struct
{
  GstH264DecoderLevel level;

  guint32 max_mbps;     /* max macroblock processing rate (MB/s) */
  guint32 max_fs;       /* max frame size in macroblocks */
  guint32 max_dpb_mbs;  /* max decoded picture buffer size in macroblocks */
  guint32 max_main_br;  /* max video bit rate */
} LevelLimits;
2289 
/* Per-level limit table, ordered by increasing level (1b between 1 and
 * 1.1); values follow the spec's level limits (Table A-1). */
static const LevelLimits level_limits_map[] = {
  {GST_H264_LEVEL_L1, 1485, 99, 396, 64},
  {GST_H264_LEVEL_L1B, 1485, 99, 396, 128},
  {GST_H264_LEVEL_L1_1, 3000, 396, 900, 192},
  {GST_H264_LEVEL_L1_2, 6000, 396, 2376, 384},
  {GST_H264_LEVEL_L1_3, 11800, 396, 2376, 768},
  {GST_H264_LEVEL_L2_0, 11880, 396, 2376, 2000},
  {GST_H264_LEVEL_L2_1, 19800, 792, 4752, 4000},
  {GST_H264_LEVEL_L2_2, 20250, 1620, 8100, 4000},
  {GST_H264_LEVEL_L3, 40500, 1620, 8100, 10000},
  {GST_H264_LEVEL_L3_1, 108000, 3600, 18000, 14000},
  {GST_H264_LEVEL_L3_2, 216000, 5120, 20480, 20000},
  {GST_H264_LEVEL_L4, 245760, 8192, 32768, 20000},
  {GST_H264_LEVEL_L4_1, 245760, 8192, 32768, 50000},
  {GST_H264_LEVEL_L4_2, 522240, 8704, 34816, 50000},
  {GST_H264_LEVEL_L5, 589824, 22080, 110400, 135000},
  {GST_H264_LEVEL_L5_1, 983040, 36864, 184320, 240000},
  {GST_H264_LEVEL_L5_2, 2073600, 36864, 184320, 240000},
  {GST_H264_LEVEL_L6, 4177920, 139264, 696320, 240000},
  {GST_H264_LEVEL_L6_1, 8355840, 139264, 696320, 480000},
  {GST_H264_LEVEL_L6_2, 16711680, 139264, 696320, 800000}
};
2312 
2313 static gint
h264_level_to_max_dpb_mbs(GstH264DecoderLevel level)2314 h264_level_to_max_dpb_mbs (GstH264DecoderLevel level)
2315 {
2316   gint i;
2317   for (i = 0; i < G_N_ELEMENTS (level_limits_map); i++) {
2318     if (level == level_limits_map[i].level)
2319       return level_limits_map[i].max_dpb_mbs;
2320   }
2321 
2322   return 0;
2323 }
2324 
2325 static void
gst_h264_decoder_set_latency(GstH264Decoder * self,const GstH264SPS * sps,gint max_dpb_size)2326 gst_h264_decoder_set_latency (GstH264Decoder * self, const GstH264SPS * sps,
2327     gint max_dpb_size)
2328 {
2329   GstH264DecoderPrivate *priv = self->priv;
2330   GstCaps *caps;
2331   GstClockTime min, max;
2332   GstStructure *structure;
2333   gint fps_d = 1, fps_n = 0;
2334   GstH264DpbBumpMode bump_level;
2335   guint32 frames_delay;
2336 
2337   caps = gst_pad_get_current_caps (GST_VIDEO_DECODER_SRC_PAD (self));
2338   if (!caps)
2339     return;
2340 
2341   structure = gst_caps_get_structure (caps, 0);
2342   if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
2343     if (fps_n == 0) {
2344       /* variable framerate: see if we have a max-framerate */
2345       gst_structure_get_fraction (structure, "max-framerate", &fps_n, &fps_d);
2346     }
2347   }
2348   gst_caps_unref (caps);
2349 
2350   /* if no fps or variable, then 25/1 */
2351   if (fps_n == 0) {
2352     fps_n = 25;
2353     fps_d = 1;
2354   }
2355 
2356   bump_level = get_bump_level (self);
2357   frames_delay = 0;
2358   switch (bump_level) {
2359     case GST_H264_DPB_BUMP_NORMAL_LATENCY:
2360       /* We always wait the DPB full before bumping. */
2361       frames_delay = max_dpb_size;
2362       break;
2363     case GST_H264_DPB_BUMP_LOW_LATENCY:
2364       /* We bump the IDR if the second frame is not a minus POC. */
2365       frames_delay = 1;
2366       break;
2367     case GST_H264_DPB_BUMP_VERY_LOW_LATENCY:
2368       /* We bump the IDR immediately. */
2369       frames_delay = 0;
2370       break;
2371     default:
2372       g_assert_not_reached ();
2373       break;
2374   }
2375 
2376   /* Consider output delay wanted by subclass */
2377   frames_delay += priv->preferred_output_delay;
2378 
2379   min = gst_util_uint64_scale_int (frames_delay * GST_SECOND, fps_d, fps_n);
2380   max = gst_util_uint64_scale_int ((max_dpb_size + priv->preferred_output_delay)
2381       * GST_SECOND, fps_d, fps_n);
2382 
2383   GST_LOG_OBJECT (self,
2384       "latency min %" G_GUINT64_FORMAT " max %" G_GUINT64_FORMAT, min, max);
2385 
2386   gst_video_decoder_set_latency (GST_VIDEO_DECODER (self), min, max);
2387 }
2388 
2389 static GstFlowReturn
gst_h264_decoder_process_sps(GstH264Decoder * self,GstH264SPS * sps)2390 gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
2391 {
2392   GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
2393   GstH264DecoderPrivate *priv = self->priv;
2394   guint8 level;
2395   gint max_dpb_mbs;
2396   gint width_mb, height_mb;
2397   gint max_dpb_frames;
2398   gint max_dpb_size;
2399   gint prev_max_dpb_size;
2400   gboolean prev_interlaced;
2401   gboolean interlaced;
2402   GstFlowReturn ret = GST_FLOW_OK;
2403 
2404   if (sps->frame_mbs_only_flag == 0) {
2405     if (!klass->new_field_picture) {
2406       GST_FIXME_OBJECT (self,
2407           "frame_mbs_only_flag != 1 not supported by subclass");
2408       return GST_FLOW_NOT_NEGOTIATED;
2409     }
2410 
2411     if (sps->mb_adaptive_frame_field_flag) {
2412       GST_LOG_OBJECT (self,
2413           "mb_adaptive_frame_field_flag == 1, MBAFF sequence");
2414     } else {
2415       GST_LOG_OBJECT (self, "mb_adaptive_frame_field_flag == 0, PAFF sequence");
2416     }
2417   }
2418 
2419   interlaced = !sps->frame_mbs_only_flag;
2420 
2421   /* Spec A.3.1 and A.3.2
2422    * For Baseline, Constrained Baseline and Main profile, the indicated level is
2423    * Level 1b if level_idc is equal to 11 and constraint_set3_flag is equal to 1
2424    */
2425   level = sps->level_idc;
2426   if (level == 11 && (sps->profile_idc == 66 || sps->profile_idc == 77) &&
2427       sps->constraint_set3_flag) {
2428     /* Level 1b */
2429     level = 9;
2430   }
2431 
2432   max_dpb_mbs = h264_level_to_max_dpb_mbs ((GstH264DecoderLevel) level);
2433   if (!max_dpb_mbs)
2434     return GST_FLOW_ERROR;
2435 
2436   width_mb = sps->width / 16;
2437   height_mb = sps->height / 16;
2438 
2439   max_dpb_frames = MIN (max_dpb_mbs / (width_mb * height_mb),
2440       GST_H264_DPB_MAX_SIZE);
2441 
2442   if (sps->vui_parameters_present_flag
2443       && sps->vui_parameters.bitstream_restriction_flag)
2444     max_dpb_frames = MAX (1, sps->vui_parameters.max_dec_frame_buffering);
2445 
2446   /* Case 1) There might be some non-conforming streams that require more DPB
2447    * size than that of specified one by SPS
2448    * Case 2) If bitstream_restriction_flag is not present,
2449    * max_dec_frame_buffering should be inferred
2450    * to be equal to MaxDpbFrames, then MaxDpbFrames can exceed num_ref_frames
2451    * See https://chromium-review.googlesource.com/c/chromium/src/+/760276/
2452    */
2453   max_dpb_size = MAX (max_dpb_frames, sps->num_ref_frames);
2454   if (max_dpb_size > GST_H264_DPB_MAX_SIZE) {
2455     GST_WARNING_OBJECT (self, "Too large calculated DPB size %d", max_dpb_size);
2456     max_dpb_size = GST_H264_DPB_MAX_SIZE;
2457   }
2458 
2459   /* Safety, so that subclass don't need bound checking */
2460   g_return_val_if_fail (max_dpb_size <= GST_H264_DPB_MAX_SIZE, GST_FLOW_ERROR);
2461 
2462   prev_max_dpb_size = gst_h264_dpb_get_max_num_frames (priv->dpb);
2463   prev_interlaced = gst_h264_dpb_get_interlaced (priv->dpb);
2464   if (priv->width != sps->width || priv->height != sps->height ||
2465       prev_max_dpb_size != max_dpb_size || prev_interlaced != interlaced) {
2466     GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
2467 
2468     GST_DEBUG_OBJECT (self,
2469         "SPS updated, resolution: %dx%d -> %dx%d, dpb size: %d -> %d, "
2470         "interlaced %d -> %d",
2471         priv->width, priv->height, sps->width, sps->height,
2472         prev_max_dpb_size, max_dpb_size, prev_interlaced, interlaced);
2473 
2474     ret = gst_h264_decoder_drain (GST_VIDEO_DECODER (self));
2475     if (ret != GST_FLOW_OK)
2476       return ret;
2477 
2478     g_assert (klass->new_sequence);
2479 
2480     if (klass->get_preferred_output_delay) {
2481       priv->preferred_output_delay =
2482           klass->get_preferred_output_delay (self, priv->is_live);
2483     } else {
2484       priv->preferred_output_delay = 0;
2485     }
2486 
2487     ret = klass->new_sequence (self,
2488         sps, max_dpb_size + priv->preferred_output_delay);
2489     if (ret != GST_FLOW_OK) {
2490       GST_WARNING_OBJECT (self, "subclass does not want accept new sequence");
2491       return ret;
2492     }
2493 
2494     priv->profile_idc = sps->profile_idc;
2495     priv->width = sps->width;
2496     priv->height = sps->height;
2497 
2498     gst_h264_decoder_set_latency (self, sps, max_dpb_size);
2499     gst_h264_dpb_set_max_num_frames (priv->dpb, max_dpb_size);
2500     gst_h264_dpb_set_interlaced (priv->dpb, interlaced);
2501   }
2502 
2503   if (!gst_h264_decoder_update_max_num_reorder_frames (self, sps))
2504     return GST_FLOW_ERROR;
2505 
2506   return GST_FLOW_OK;
2507 }
2508 
2509 static gboolean
gst_h264_decoder_init_gap_picture(GstH264Decoder * self,GstH264Picture * picture,gint frame_num)2510 gst_h264_decoder_init_gap_picture (GstH264Decoder * self,
2511     GstH264Picture * picture, gint frame_num)
2512 {
2513   picture->nonexisting = TRUE;
2514   picture->nal_ref_idc = 1;
2515   picture->frame_num = picture->pic_num = frame_num;
2516   picture->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag = FALSE;
2517   picture->ref = GST_H264_PICTURE_REF_SHORT_TERM;
2518   picture->ref_pic = TRUE;
2519   picture->dec_ref_pic_marking.long_term_reference_flag = FALSE;
2520   picture->field = GST_H264_PICTURE_FIELD_FRAME;
2521 
2522   return gst_h264_decoder_calculate_poc (self, picture);
2523 }
2524 
/* Submit the current slice (priv->current_slice) of the current picture
 * to the subclass' decode_slice() vfunc, first building the modified
 * reference picture lists when process_ref_pic_lists is enabled.
 *
 * Returns: GST_FLOW_ERROR when there is no current picture or reference
 * list modification fails, otherwise the subclass' flow return. */
static GstFlowReturn
gst_h264_decoder_decode_slice (GstH264Decoder * self)
{
  GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
  GstH264DecoderPrivate *priv = self->priv;
  GstH264Slice *slice = &priv->current_slice;
  GstH264Picture *picture = priv->current_picture;
  GArray *ref_pic_list0 = NULL;
  GArray *ref_pic_list1 = NULL;
  GstFlowReturn ret = GST_FLOW_OK;

  if (!picture) {
    GST_ERROR_OBJECT (self, "No current picture");
    return GST_FLOW_ERROR;
  }

  GST_LOG_OBJECT (self, "Decode picture %p (frame_num %d, poc %d)",
      picture, picture->frame_num, picture->pic_order_cnt);

  priv->max_pic_num = slice->header.max_pic_num;

  if (priv->process_ref_pic_lists) {
    if (!gst_h264_decoder_modify_ref_pic_lists (self)) {
      ret = GST_FLOW_ERROR;
      goto beach;
    }

    /* Hand the (possibly reordered) per-slice lists to the subclass;
     * NULL when the subclass does its own list management */
    ref_pic_list0 = priv->ref_pic_list0;
    ref_pic_list1 = priv->ref_pic_list1;
  }

  g_assert (klass->decode_slice);

  ret = klass->decode_slice (self, picture, slice, ref_pic_list0,
      ref_pic_list1);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (self,
        "Subclass didn't want to decode picture %p (frame_num %d, poc %d)",
        picture, picture->frame_num, picture->pic_order_cnt);
  }

beach:
  /* The modified lists are rebuilt for every slice */
  g_array_set_size (priv->ref_pic_list0, 0);
  g_array_set_size (priv->ref_pic_list1, 0);

  return ret;
}
2572 
2573 static gint
pic_num_desc_compare(const GstH264Picture ** a,const GstH264Picture ** b)2574 pic_num_desc_compare (const GstH264Picture ** a, const GstH264Picture ** b)
2575 {
2576   return (*b)->pic_num - (*a)->pic_num;
2577 }
2578 
2579 static gint
long_term_pic_num_asc_compare(const GstH264Picture ** a,const GstH264Picture ** b)2580 long_term_pic_num_asc_compare (const GstH264Picture ** a,
2581     const GstH264Picture ** b)
2582 {
2583   return (*a)->long_term_pic_num - (*b)->long_term_pic_num;
2584 }
2585 
/* Build RefPicList0 for a P/SP slice of a frame picture (8.2.4.2.1).
 * The result is stored in priv->ref_pic_list_p0. */
static void
construct_ref_pic_lists_p (GstH264Decoder * self,
    GstH264Picture * current_picture)
{
  GstH264DecoderPrivate *priv = self->priv;
  gint pos;

  /* RefPicList0 (8.2.4.2.1) [[1] [2]], where:
   * [1] shortterm ref pics sorted by descending pic_num,
   * [2] longterm ref pics by ascending long_term_pic_num.
   */
  g_array_set_size (priv->ref_pic_list_p0, 0);

  /* [1]: collect all short-term refs, then sort by descending pic_num */
  gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
      TRUE, FALSE, priv->ref_pic_list_p0);
  g_array_sort (priv->ref_pic_list_p0, (GCompareFunc) pic_num_desc_compare);

  /* [2]: append long-term refs and sort only that tail (entries from
   * |pos| onward) by ascending long_term_pic_num */
  pos = priv->ref_pic_list_p0->len;
  gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
      FALSE, priv->ref_pic_list_p0);
  g_qsort_with_data (&g_array_index (priv->ref_pic_list_p0, gpointer, pos),
      priv->ref_pic_list_p0->len - pos, sizeof (gpointer),
      (GCompareDataFunc) long_term_pic_num_asc_compare, NULL);

#ifndef GST_DISABLE_GST_DEBUG
  /* Dump the final list; long-term entries get an 's' suffix */
  if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_DEBUG) {
    GString *str = g_string_new (NULL);
    for (pos = 0; pos < priv->ref_pic_list_p0->len; pos++) {
      GstH264Picture *ref =
          g_array_index (priv->ref_pic_list_p0, GstH264Picture *, pos);
      if (!GST_H264_PICTURE_IS_LONG_TERM_REF (ref))
        g_string_append_printf (str, "|%i", ref->pic_num);
      else
        g_string_append_printf (str, "|%is", ref->pic_num);
    }
    GST_DEBUG_OBJECT (self, "ref_pic_list_p0: %s|", str->str);
    g_string_free (str, TRUE);
  }
#endif
}
2626 
2627 static gint
frame_num_wrap_desc_compare(const GstH264Picture ** a,const GstH264Picture ** b)2628 frame_num_wrap_desc_compare (const GstH264Picture ** a,
2629     const GstH264Picture ** b)
2630 {
2631   return (*b)->frame_num_wrap - (*a)->frame_num_wrap;
2632 }
2633 
2634 static gint
long_term_frame_idx_asc_compare(const GstH264Picture ** a,const GstH264Picture ** b)2635 long_term_frame_idx_asc_compare (const GstH264Picture ** a,
2636     const GstH264Picture ** b)
2637 {
2638   return (*a)->long_term_frame_idx - (*b)->long_term_frame_idx;
2639 }
2640 
/* init_picture_refs_fields_1 in gstvaapidecoder_h264.c */
/* 8.2.4.2.5: build a field reference picture list from an ordered
 * reference frame list by alternately appending the next field with
 * the same parity as |field| and the next field with the opposite
 * parity.  Each appended picture takes an extra reference, which is
 * owned by |ref_pic_list_x| afterwards. */
static void
init_picture_refs_fields_1 (GstH264Decoder * self, GstH264PictureField field,
    GArray * ref_frame_list, GArray * ref_pic_list_x)
{
  /* Two independent cursors: |i| scans for same-parity fields,
   * |j| for opposite-parity fields */
  guint i = 0, j = 0;

  do {
    /* Append the next field with the same parity as the current picture */
    for (; i < ref_frame_list->len; i++) {
      GstH264Picture *pic = g_array_index (ref_frame_list, GstH264Picture *, i);
      if (pic->field == field) {
        pic = gst_h264_picture_ref (pic);
        g_array_append_val (ref_pic_list_x, pic);
        i++;
        break;
      }
    }

    /* Then the next field with the opposite parity */
    for (; j < ref_frame_list->len; j++) {
      GstH264Picture *pic = g_array_index (ref_frame_list, GstH264Picture *, j);
      if (pic->field != field) {
        pic = gst_h264_picture_ref (pic);
        g_array_append_val (ref_pic_list_x, pic);
        j++;
        break;
      }
    }
  } while (i < ref_frame_list->len || j < ref_frame_list->len);
}
2670 
2671 static void
construct_ref_field_pic_lists_p(GstH264Decoder * self,GstH264Picture * current_picture)2672 construct_ref_field_pic_lists_p (GstH264Decoder * self,
2673     GstH264Picture * current_picture)
2674 {
2675   GstH264DecoderPrivate *priv = self->priv;
2676   gint pos;
2677 
2678   g_array_set_size (priv->ref_pic_list_p0, 0);
2679   g_array_set_size (priv->ref_frame_list_0_short_term, 0);
2680   g_array_set_size (priv->ref_frame_list_long_term, 0);
2681 
2682   /* 8.2.4.2.2, 8.2.4.2.5 refFrameList0ShortTerm:
2683    * short-term ref pictures sorted by descending frame_num_wrap.
2684    */
2685   gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
2686       TRUE, TRUE, priv->ref_frame_list_0_short_term);
2687   g_array_sort (priv->ref_frame_list_0_short_term,
2688       (GCompareFunc) frame_num_wrap_desc_compare);
2689 
2690 #ifndef GST_DISABLE_GST_DEBUG
2691   if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_TRACE
2692       && priv->ref_frame_list_0_short_term->len) {
2693     GString *str = g_string_new (NULL);
2694     for (pos = 0; pos < priv->ref_frame_list_0_short_term->len; pos++) {
2695       GstH264Picture *ref = g_array_index (priv->ref_frame_list_0_short_term,
2696           GstH264Picture *, pos);
2697       g_string_append_printf (str, "|%i(%d)", ref->frame_num_wrap, ref->field);
2698     }
2699     GST_TRACE_OBJECT (self, "ref_frame_list_0_short_term (%d): %s|",
2700         current_picture->field, str->str);
2701     g_string_free (str, TRUE);
2702   }
2703 #endif
2704 
2705   /* 8.2.4.2.2 refFrameList0LongTerm,:
2706    * long-term ref pictures sorted by ascending long_term_frame_idx.
2707    */
2708   gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
2709       TRUE, priv->ref_frame_list_long_term);
2710   g_array_sort (priv->ref_frame_list_long_term,
2711       (GCompareFunc) long_term_frame_idx_asc_compare);
2712 
2713 #ifndef GST_DISABLE_GST_DEBUG
2714   if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_TRACE
2715       && priv->ref_frame_list_long_term->len) {
2716     GString *str = g_string_new (NULL);
2717     for (pos = 0; pos < priv->ref_frame_list_long_term->len; pos++) {
2718       GstH264Picture *ref = g_array_index (priv->ref_frame_list_0_short_term,
2719           GstH264Picture *, pos);
2720       g_string_append_printf (str, "|%i(%d)", ref->long_term_frame_idx,
2721           ref->field);
2722     }
2723     GST_TRACE_OBJECT (self, "ref_frame_list_0_long_term (%d): %s|",
2724         current_picture->field, str->str);
2725     g_string_free (str, TRUE);
2726   }
2727 #endif
2728 
2729   /* 8.2.4.2.5 */
2730   init_picture_refs_fields_1 (self, current_picture->field,
2731       priv->ref_frame_list_0_short_term, priv->ref_pic_list_p0);
2732   init_picture_refs_fields_1 (self, current_picture->field,
2733       priv->ref_frame_list_long_term, priv->ref_pic_list_p0);
2734 
2735 #ifndef GST_DISABLE_GST_DEBUG
2736   if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_DEBUG
2737       && priv->ref_pic_list_p0->len) {
2738     GString *str = g_string_new (NULL);
2739     for (pos = 0; pos < priv->ref_pic_list_p0->len; pos++) {
2740       GstH264Picture *ref =
2741           g_array_index (priv->ref_pic_list_p0, GstH264Picture *, pos);
2742       if (!GST_H264_PICTURE_IS_LONG_TERM_REF (ref))
2743         g_string_append_printf (str, "|%i(%d)s", ref->frame_num_wrap,
2744             ref->field);
2745       else
2746         g_string_append_printf (str, "|%i(%d)l", ref->long_term_frame_idx,
2747             ref->field);
2748     }
2749     GST_DEBUG_OBJECT (self, "ref_pic_list_p0 (%d): %s|", current_picture->field,
2750         str->str);
2751     g_string_free (str, TRUE);
2752   }
2753 #endif
2754 
2755   /* Clear temporary lists, now pictures are owned by ref_pic_list_p0 */
2756   g_array_set_size (priv->ref_frame_list_0_short_term, 0);
2757   g_array_set_size (priv->ref_frame_list_long_term, 0);
2758 }
2759 
2760 static gboolean
lists_are_equal(GArray * l1,GArray * l2)2761 lists_are_equal (GArray * l1, GArray * l2)
2762 {
2763   gint i;
2764 
2765   if (l1->len != l2->len)
2766     return FALSE;
2767 
2768   for (i = 0; i < l1->len; i++)
2769     if (g_array_index (l1, gpointer, i) != g_array_index (l2, gpointer, i))
2770       return FALSE;
2771 
2772   return TRUE;
2773 }
2774 
2775 static gint
split_ref_pic_list_b(GstH264Decoder * self,GArray * ref_pic_list_b,GCompareFunc compare_func)2776 split_ref_pic_list_b (GstH264Decoder * self, GArray * ref_pic_list_b,
2777     GCompareFunc compare_func)
2778 {
2779   gint pos;
2780 
2781   for (pos = 0; pos < ref_pic_list_b->len; pos++) {
2782     GstH264Picture *pic = g_array_index (ref_pic_list_b, GstH264Picture *, pos);
2783     if (compare_func (&pic, &self->priv->current_picture) > 0)
2784       break;
2785   }
2786 
2787   return pos;
2788 }
2789 
2790 static void
print_ref_pic_list_b(GstH264Decoder * self,GArray * ref_list_b,const gchar * name)2791 print_ref_pic_list_b (GstH264Decoder * self, GArray * ref_list_b,
2792     const gchar * name)
2793 {
2794 #ifndef GST_DISABLE_GST_DEBUG
2795   GString *str;
2796   gint i;
2797 
2798   if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_DEBUG)
2799     return;
2800 
2801   str = g_string_new (NULL);
2802 
2803   for (i = 0; i < ref_list_b->len; i++) {
2804     GstH264Picture *ref = g_array_index (ref_list_b, GstH264Picture *, i);
2805 
2806     if (!GST_H264_PICTURE_IS_LONG_TERM_REF (ref))
2807       g_string_append_printf (str, "|%i", ref->pic_order_cnt);
2808     else
2809       g_string_append_printf (str, "|%il", ref->long_term_pic_num);
2810   }
2811 
2812   GST_DEBUG_OBJECT (self, "%s: %s| curr %i", name, str->str,
2813       self->priv->current_picture->pic_order_cnt);
2814   g_string_free (str, TRUE);
2815 #endif
2816 }
2817 
/* Build RefPicList0 and RefPicList1 for a B slice of a frame picture
 * (8.2.4.2.3).  Results are stored in priv->ref_pic_list_b0/_b1. */
static void
construct_ref_pic_lists_b (GstH264Decoder * self,
    GstH264Picture * current_picture)
{
  GstH264DecoderPrivate *priv = self->priv;
  gint pos;

  /* RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where:
   * [1] shortterm ref pics with POC < current_picture's POC sorted by descending POC,
   * [2] shortterm ref pics with POC > current_picture's POC by ascending POC,
   * [3] longterm ref pics by ascending long_term_pic_num.
   */
  g_array_set_size (priv->ref_pic_list_b0, 0);
  g_array_set_size (priv->ref_pic_list_b1, 0);

  /* 8.2.4.2.3
   * When pic_order_cnt_type is equal to 0, reference pictures that are marked
   * as "non-existing" as specified in clause 8.2.5.2 are not included in either
   * RefPicList0 or RefPicList1
   */
  gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
      current_picture->pic_order_cnt_type != 0, FALSE, priv->ref_pic_list_b0);

  /* First sort ascending, this will put [1] in right place and finish
   * [2]. */
  print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");
  g_array_sort (priv->ref_pic_list_b0, (GCompareFunc) poc_asc_compare);
  print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");

  /* Find first with POC > current_picture's POC to get first element
   * in [2]... */
  pos = split_ref_pic_list_b (self, priv->ref_pic_list_b0,
      (GCompareFunc) poc_asc_compare);

  GST_DEBUG_OBJECT (self, "split point %i", pos);

  /* and sort [1] descending, thus finishing sequence [1] [2]. */
  g_qsort_with_data (priv->ref_pic_list_b0->data, pos, sizeof (gpointer),
      (GCompareDataFunc) poc_desc_compare, NULL);

  /* Now add [3] and sort by ascending long_term_pic_num. */
  pos = priv->ref_pic_list_b0->len;
  gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
      FALSE, priv->ref_pic_list_b0);
  g_qsort_with_data (&g_array_index (priv->ref_pic_list_b0, gpointer, pos),
      priv->ref_pic_list_b0->len - pos, sizeof (gpointer),
      (GCompareDataFunc) long_term_pic_num_asc_compare, NULL);

  /* RefPicList1 (8.2.4.2.4) [[1] [2] [3]], where:
   * [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC,
   * [2] shortterm ref pics with POC < curr_pic's POC by descending POC,
   * [3] longterm ref pics by ascending long_term_pic_num.
   */
  gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
      current_picture->pic_order_cnt_type != 0, FALSE, priv->ref_pic_list_b1);

  /* First sort by descending POC. */
  g_array_sort (priv->ref_pic_list_b1, (GCompareFunc) poc_desc_compare);

  /* Split at first with POC < current_picture's POC to get first element
   * in [2]... */
  pos = split_ref_pic_list_b (self, priv->ref_pic_list_b1,
      (GCompareFunc) poc_desc_compare);

  /* and sort [1] ascending. */
  g_qsort_with_data (priv->ref_pic_list_b1->data, pos, sizeof (gpointer),
      (GCompareDataFunc) poc_asc_compare, NULL);

  /* Now add [3] and sort by ascending long_term_pic_num */
  pos = priv->ref_pic_list_b1->len;
  gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
      FALSE, priv->ref_pic_list_b1);
  g_qsort_with_data (&g_array_index (priv->ref_pic_list_b1, gpointer, pos),
      priv->ref_pic_list_b1->len - pos, sizeof (gpointer),
      (GCompareDataFunc) long_term_pic_num_asc_compare, NULL);

  /* If lists identical, swap first two entries in RefPicList1 (spec
   * 8.2.4.2.3) */
  if (priv->ref_pic_list_b1->len > 1
      && lists_are_equal (priv->ref_pic_list_b0, priv->ref_pic_list_b1)) {
    /* swap */
    GstH264Picture **list = (GstH264Picture **) priv->ref_pic_list_b1->data;
    GstH264Picture *pic = list[0];
    list[0] = list[1];
    list[1] = pic;
  }

  print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");
  print_ref_pic_list_b (self, priv->ref_pic_list_b1, "ref_pic_list_b1");
}
2908 
/* Build RefPicList0 and RefPicList1 for a B slice of a field picture
 * (8.2.4.2.4 / 8.2.4.2.5).  Results are stored in
 * priv->ref_pic_list_b0/_b1, which own a reference on each entry. */
static void
construct_ref_field_pic_lists_b (GstH264Decoder * self,
    GstH264Picture * current_picture)
{
  GstH264DecoderPrivate *priv = self->priv;
  gint pos;

  /* refFrameList0ShortTerm (8.2.4.2.4) [[1] [2]], where:
   * [1] shortterm ref pics with POC < current_picture's POC sorted by descending POC,
   * [2] shortterm ref pics with POC > current_picture's POC by ascending POC,
   */
  g_array_set_size (priv->ref_pic_list_b0, 0);
  g_array_set_size (priv->ref_pic_list_b1, 0);
  g_array_set_size (priv->ref_frame_list_0_short_term, 0);
  g_array_set_size (priv->ref_frame_list_1_short_term, 0);
  g_array_set_size (priv->ref_frame_list_long_term, 0);

  /* 8.2.4.2.4
   * When pic_order_cnt_type is equal to 0, reference pictures that are marked
   * as "non-existing" as specified in clause 8.2.5.2 are not included in either
   * RefPicList0 or RefPicList1
   */
  gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
      current_picture->pic_order_cnt_type != 0, TRUE,
      priv->ref_frame_list_0_short_term);

  /* First sort ascending, this will put [1] in right place and finish
   * [2]. */
  print_ref_pic_list_b (self, priv->ref_frame_list_0_short_term,
      "ref_frame_list_0_short_term");
  g_array_sort (priv->ref_frame_list_0_short_term,
      (GCompareFunc) poc_asc_compare);
  print_ref_pic_list_b (self, priv->ref_frame_list_0_short_term,
      "ref_frame_list_0_short_term");

  /* Find first with POC > current_picture's POC to get first element
   * in [2]... */
  pos = split_ref_pic_list_b (self, priv->ref_frame_list_0_short_term,
      (GCompareFunc) poc_asc_compare);

  GST_DEBUG_OBJECT (self, "split point %i", pos);

  /* and sort [1] descending, thus finishing sequence [1] [2]. */
  g_qsort_with_data (priv->ref_frame_list_0_short_term->data, pos,
      sizeof (gpointer), (GCompareDataFunc) poc_desc_compare, NULL);

  /* refFrameList1ShortTerm (8.2.4.2.4) [[1] [2]], where:
   * [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC,
   * [2] shortterm ref pics with POC < curr_pic's POC by descending POC,
   */
  gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
      current_picture->pic_order_cnt_type != 0, TRUE,
      priv->ref_frame_list_1_short_term);

  /* First sort by descending POC. */
  g_array_sort (priv->ref_frame_list_1_short_term,
      (GCompareFunc) poc_desc_compare);

  /* Split at first with POC < current_picture's POC to get first element
   * in [2]... */
  pos = split_ref_pic_list_b (self, priv->ref_frame_list_1_short_term,
      (GCompareFunc) poc_desc_compare);

  /* and sort [1] ascending. */
  g_qsort_with_data (priv->ref_frame_list_1_short_term->data, pos,
      sizeof (gpointer), (GCompareDataFunc) poc_asc_compare, NULL);

  /* 8.2.4.2.2 refFrameList0LongTerm,:
   * long-term ref pictures sorted by ascending long_term_frame_idx.
   */
  gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
      TRUE, priv->ref_frame_list_long_term);
  g_array_sort (priv->ref_frame_list_long_term,
      (GCompareFunc) long_term_frame_idx_asc_compare);

  /* 8.2.4.2.5 RefPicList0 */
  init_picture_refs_fields_1 (self, current_picture->field,
      priv->ref_frame_list_0_short_term, priv->ref_pic_list_b0);
  init_picture_refs_fields_1 (self, current_picture->field,
      priv->ref_frame_list_long_term, priv->ref_pic_list_b0);

  /* 8.2.4.2.5 RefPicList1 */
  init_picture_refs_fields_1 (self, current_picture->field,
      priv->ref_frame_list_1_short_term, priv->ref_pic_list_b1);
  init_picture_refs_fields_1 (self, current_picture->field,
      priv->ref_frame_list_long_term, priv->ref_pic_list_b1);

  /* If lists identical, swap first two entries in RefPicList1 (spec
   * 8.2.4.2.5) */
  if (priv->ref_pic_list_b1->len > 1
      && lists_are_equal (priv->ref_pic_list_b0, priv->ref_pic_list_b1)) {
    /* swap */
    GstH264Picture **list = (GstH264Picture **) priv->ref_pic_list_b1->data;
    GstH264Picture *pic = list[0];
    list[0] = list[1];
    list[1] = pic;
  }

  print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");
  print_ref_pic_list_b (self, priv->ref_pic_list_b1, "ref_pic_list_b1");

  /* Clear temporary lists, now pictures are owned by ref_pic_list_b0
   * and ref_pic_list_b1 */
  g_array_set_size (priv->ref_frame_list_0_short_term, 0);
  g_array_set_size (priv->ref_frame_list_1_short_term, 0);
  g_array_set_size (priv->ref_frame_list_long_term, 0);
}
3016 
3017 static void
gst_h264_decoder_prepare_ref_pic_lists(GstH264Decoder * self,GstH264Picture * current_picture)3018 gst_h264_decoder_prepare_ref_pic_lists (GstH264Decoder * self,
3019     GstH264Picture * current_picture)
3020 {
3021   GstH264DecoderPrivate *priv = self->priv;
3022   gboolean construct_list = FALSE;
3023   gint i;
3024   GArray *dpb_array = gst_h264_dpb_get_pictures_all (priv->dpb);
3025 
3026   /* 8.2.4.2.1 ~ 8.2.4.2.4
3027    * When this process is invoked, there shall be at least one reference entry
3028    * that is currently marked as "used for reference"
3029    * (i.e., as "used for short-term reference" or "used for long-term reference")
3030    * and is not marked as "non-existing"
3031    */
3032   for (i = 0; i < dpb_array->len; i++) {
3033     GstH264Picture *picture = g_array_index (dpb_array, GstH264Picture *, i);
3034     if (GST_H264_PICTURE_IS_REF (picture) && !picture->nonexisting) {
3035       construct_list = TRUE;
3036       break;
3037     }
3038   }
3039   g_array_unref (dpb_array);
3040 
3041   if (!construct_list) {
3042     gst_h264_decoder_clear_ref_pic_lists (self);
3043     return;
3044   }
3045 
3046   if (GST_H264_PICTURE_IS_FRAME (current_picture)) {
3047     construct_ref_pic_lists_p (self, current_picture);
3048     construct_ref_pic_lists_b (self, current_picture);
3049   } else {
3050     construct_ref_field_pic_lists_p (self, current_picture);
3051     construct_ref_field_pic_lists_b (self, current_picture);
3052   }
3053 }
3054 
/* Drop all entries from the prepared P/B reference picture lists. */
static void
gst_h264_decoder_clear_ref_pic_lists (GstH264Decoder * self)
{
  GstH264DecoderPrivate *priv = self->priv;

  g_array_set_size (priv->ref_pic_list_p0, 0);
  g_array_set_size (priv->ref_pic_list_b0, 0);
  g_array_set_size (priv->ref_pic_list_b1, 0);
}
3064 
3065 static gint
long_term_pic_num_f(GstH264Decoder * self,const GstH264Picture * picture)3066 long_term_pic_num_f (GstH264Decoder * self, const GstH264Picture * picture)
3067 {
3068   if (GST_H264_PICTURE_IS_LONG_TERM_REF (picture))
3069     return picture->long_term_pic_num;
3070   return 2 * (self->priv->max_long_term_frame_idx + 1);
3071 }
3072 
3073 static gint
pic_num_f(GstH264Decoder * self,const GstH264Picture * picture)3074 pic_num_f (GstH264Decoder * self, const GstH264Picture * picture)
3075 {
3076   if (!GST_H264_PICTURE_IS_LONG_TERM_REF (picture))
3077     return picture->pic_num;
3078   return self->priv->max_pic_num;
3079 }
3080 
/* shift elements on the |array| starting from |from| to |to|,
 * inclusive, one position to the right and insert pic at |from| */
static void
shift_right_and_insert (GArray * array, gint from, gint to,
    GstH264Picture * picture)
{
  g_return_if_fail (from <= to);
  g_return_if_fail (array && picture);

  /* Grow to |to| + 2 entries so the insert below can shift the
   * |from|..|to| range right without reallocating mid-operation; the
   * extra slot is trimmed by the caller after the modification loop */
  g_array_set_size (array, to + 2);
  g_array_insert_val (array, from, picture);
}
3093 
3094 /* This can process either ref_pic_list0 or ref_pic_list1, depending
3095  * on the list argument. Set up pointers to proper list to be
3096  * processed here. */
3097 static gboolean
modify_ref_pic_list(GstH264Decoder * self,int list)3098 modify_ref_pic_list (GstH264Decoder * self, int list)
3099 {
3100   GstH264DecoderPrivate *priv = self->priv;
3101   GstH264Picture *picture = priv->current_picture;
3102   GArray *ref_pic_listx;
3103   const GstH264SliceHdr *slice_hdr = &priv->current_slice.header;
3104   const GstH264RefPicListModification *list_mod;
3105   gboolean ref_pic_list_modification_flag_lX;
3106   gint num_ref_idx_lX_active_minus1;
3107   guint num_ref_pic_list_modifications;
3108   gint i;
3109   gint pic_num_lx_pred = picture->pic_num;
3110   gint ref_idx_lx = 0, src, dst;
3111   gint pic_num_lx_no_wrap;
3112   gint pic_num_lx;
3113   gboolean done = FALSE;
3114   GstH264Picture *pic;
3115 
3116   if (list == 0) {
3117     ref_pic_listx = priv->ref_pic_list0;
3118     ref_pic_list_modification_flag_lX =
3119         slice_hdr->ref_pic_list_modification_flag_l0;
3120     num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
3121     num_ref_idx_lX_active_minus1 = slice_hdr->num_ref_idx_l0_active_minus1;
3122     list_mod = slice_hdr->ref_pic_list_modification_l0;
3123   } else {
3124     ref_pic_listx = priv->ref_pic_list1;
3125     ref_pic_list_modification_flag_lX =
3126         slice_hdr->ref_pic_list_modification_flag_l1;
3127     num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
3128     num_ref_idx_lX_active_minus1 = slice_hdr->num_ref_idx_l1_active_minus1;
3129     list_mod = slice_hdr->ref_pic_list_modification_l1;
3130   }
3131 
3132   /* Resize the list to the size requested in the slice header.
3133    *
3134    * Note that per 8.2.4.2 it's possible for
3135    * num_ref_idx_lX_active_minus1 to indicate there should be more ref
3136    * pics on list than we constructed.  Those superfluous ones should
3137    * be treated as non-reference and will be initialized to null,
3138    * which must be handled by clients */
3139   g_assert (num_ref_idx_lX_active_minus1 >= 0);
3140   if (ref_pic_listx->len > num_ref_idx_lX_active_minus1 + 1)
3141     g_array_set_size (ref_pic_listx, num_ref_idx_lX_active_minus1 + 1);
3142 
3143   if (!ref_pic_list_modification_flag_lX)
3144     return TRUE;
3145 
3146   /* Spec 8.2.4.3:
3147    * Reorder pictures on the list in a way specified in the stream. */
3148   for (i = 0; i < num_ref_pic_list_modifications && !done; i++) {
3149     switch (list_mod->modification_of_pic_nums_idc) {
3150         /* 8.2.4.3.1 - Modify short reference picture position. */
3151       case 0:
3152       case 1:
3153         /* 8-34 */
3154         if (list_mod->modification_of_pic_nums_idc == 0) {
3155           /* Substract given value from predicted PicNum. */
3156           pic_num_lx_no_wrap = pic_num_lx_pred -
3157               (list_mod->value.abs_diff_pic_num_minus1 + 1);
3158           /* Wrap around max_pic_num if it becomes < 0 as result of
3159            * subtraction */
3160           if (pic_num_lx_no_wrap < 0)
3161             pic_num_lx_no_wrap += priv->max_pic_num;
3162         } else {                /* 8-35 */
3163           /* Add given value to predicted PicNum. */
3164           pic_num_lx_no_wrap = pic_num_lx_pred +
3165               (list_mod->value.abs_diff_pic_num_minus1 + 1);
3166           /* Wrap around max_pic_num if it becomes >= max_pic_num as
3167            * result of the addition */
3168           if (pic_num_lx_no_wrap >= priv->max_pic_num)
3169             pic_num_lx_no_wrap -= priv->max_pic_num;
3170         }
3171 
3172         /* For use in next iteration */
3173         pic_num_lx_pred = pic_num_lx_no_wrap;
3174 
3175         /* 8-36 */
3176         if (pic_num_lx_no_wrap > picture->pic_num)
3177           pic_num_lx = pic_num_lx_no_wrap - priv->max_pic_num;
3178         else
3179           pic_num_lx = pic_num_lx_no_wrap;
3180 
3181         /* 8-37 */
3182         g_assert (num_ref_idx_lX_active_minus1 + 1 < 32);
3183         pic = gst_h264_dpb_get_short_ref_by_pic_num (priv->dpb, pic_num_lx);
3184         if (!pic) {
3185           GST_WARNING_OBJECT (self, "Malformed stream, no pic num %d",
3186               pic_num_lx);
3187           break;
3188         }
3189         shift_right_and_insert (ref_pic_listx, ref_idx_lx,
3190             num_ref_idx_lX_active_minus1, pic);
3191         ref_idx_lx++;
3192 
3193         for (src = ref_idx_lx, dst = ref_idx_lx;
3194             src <= num_ref_idx_lX_active_minus1 + 1; src++) {
3195           GstH264Picture *src_pic =
3196               g_array_index (ref_pic_listx, GstH264Picture *, src);
3197           gint src_pic_num_lx = src_pic ? pic_num_f (self, src_pic) : -1;
3198           if (src_pic_num_lx != pic_num_lx)
3199             g_array_index (ref_pic_listx, GstH264Picture *, dst++) = src_pic;
3200         }
3201 
3202         break;
3203 
3204         /* 8.2.4.3.2 - Long-term reference pictures */
3205       case 2:
3206         /* (8-28) */
3207         g_assert (num_ref_idx_lX_active_minus1 + 1 < 32);
3208         pic = gst_h264_dpb_get_long_ref_by_long_term_pic_num (priv->dpb,
3209             list_mod->value.long_term_pic_num);
3210         if (!pic) {
3211           GST_WARNING_OBJECT (self, "Malformed stream, no pic num %d",
3212               list_mod->value.long_term_pic_num);
3213           break;
3214         }
3215         shift_right_and_insert (ref_pic_listx, ref_idx_lx,
3216             num_ref_idx_lX_active_minus1, pic);
3217         ref_idx_lx++;
3218 
3219         for (src = ref_idx_lx, dst = ref_idx_lx;
3220             src <= num_ref_idx_lX_active_minus1 + 1; src++) {
3221           GstH264Picture *src_pic =
3222               g_array_index (ref_pic_listx, GstH264Picture *, src);
3223           if (long_term_pic_num_f (self, src_pic) !=
3224               list_mod->value.long_term_pic_num)
3225             g_array_index (ref_pic_listx, GstH264Picture *, dst++) = src_pic;
3226         }
3227 
3228         break;
3229 
3230         /* End of modification list */
3231       case 3:
3232         done = TRUE;
3233         break;
3234 
3235       default:
3236         /* may be recoverable */
3237         GST_WARNING ("Invalid modification_of_pic_nums_idc = %d",
3238             list_mod->modification_of_pic_nums_idc);
3239         break;
3240     }
3241 
3242     list_mod++;
3243   }
3244 
3245   /* Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx in the above loop is
3246    * temporarily made one element longer than the required final list.
3247    * Resize the list back to its required size. */
3248   if (ref_pic_listx->len > num_ref_idx_lX_active_minus1 + 1)
3249     g_array_set_size (ref_pic_listx, num_ref_idx_lX_active_minus1 + 1);
3250 
3251   return TRUE;
3252 }
3253 
3254 static void
copy_pic_list_into(GArray * dest,GArray * src)3255 copy_pic_list_into (GArray * dest, GArray * src)
3256 {
3257   gint i;
3258   g_array_set_size (dest, 0);
3259 
3260   for (i = 0; i < src->len; i++)
3261     g_array_append_val (dest, g_array_index (src, gpointer, i));
3262 }
3263 
3264 static gboolean
gst_h264_decoder_modify_ref_pic_lists(GstH264Decoder * self)3265 gst_h264_decoder_modify_ref_pic_lists (GstH264Decoder * self)
3266 {
3267   GstH264DecoderPrivate *priv = self->priv;
3268   GstH264SliceHdr *slice_hdr = &priv->current_slice.header;
3269 
3270   g_array_set_size (priv->ref_pic_list0, 0);
3271   g_array_set_size (priv->ref_pic_list1, 0);
3272 
3273   if (GST_H264_IS_P_SLICE (slice_hdr) || GST_H264_IS_SP_SLICE (slice_hdr)) {
3274     /* 8.2.4 fill reference picture list RefPicList0 for P or SP slice */
3275     copy_pic_list_into (priv->ref_pic_list0, priv->ref_pic_list_p0);
3276     return modify_ref_pic_list (self, 0);
3277   } else if (GST_H264_IS_B_SLICE (slice_hdr)) {
3278     /* 8.2.4 fill reference picture list RefPicList0 and RefPicList1 for B slice */
3279     copy_pic_list_into (priv->ref_pic_list0, priv->ref_pic_list_b0);
3280     copy_pic_list_into (priv->ref_pic_list1, priv->ref_pic_list_b1);
3281     return modify_ref_pic_list (self, 0)
3282         && modify_ref_pic_list (self, 1);
3283   }
3284 
3285   return TRUE;
3286 }
3287 
/**
 * gst_h264_decoder_set_process_ref_pic_lists:
 * @decoder: a #GstH264Decoder
 * @process: whether subclass is requiring reference picture modification process
 *
 * Called to en/disable reference picture modification process.
 *
 * Since: 1.18
 */
void
gst_h264_decoder_set_process_ref_pic_lists (GstH264Decoder * decoder,
    gboolean process)
{
  /* When enabled, gst_h264_decoder_decode_slice() builds and applies
   * the slice-header modifications to ref_pic_list0/1 before invoking
   * the subclass' decode_slice() vfunc */
  decoder->priv->process_ref_pic_lists = process;
}
3303 
/**
 * gst_h264_decoder_get_picture:
 * @decoder: a #GstH264Decoder
 * @system_frame_number: a target system frame number of #GstH264Picture
 *
 * Retrieve DPB and return a #GstH264Picture corresponding to
 * the @system_frame_number
 *
 * Returns: (transfer full): a #GstH264Picture if successful, or %NULL otherwise
 *
 * Since: 1.18
 */
GstH264Picture *
gst_h264_decoder_get_picture (GstH264Decoder * decoder,
    guint32 system_frame_number)
{
  /* Lookup is delegated entirely to the DPB helper */
  return gst_h264_dpb_get_picture (decoder->priv->dpb, system_frame_number);
}
3322