1 /* GStreamer
2  * Copyright (C) 2020 Daniel Almeida <daniel.almeida@collabora.com>
3  *
4  * This library is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Library General Public
6  * License as published by the Free Software Foundation; either
7  * version 2 of the License, or (at your option) any later version.
8  *
9  * This library is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12  * Library General Public License for more details.
13  *
14  * You should have received a copy of the GNU Library General Public
15  * License along with this library; if not, write to the
16  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17  * Boston, MA 02110-1301, USA.
18  */
19 
20 
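/* Stateless V4L2 MPEG2 decoder element (GstV4l2CodecMpeg2Dec).
 *
 * The element subclasses GstMpeg2Decoder and forwards the parsed sequence,
 * picture and quantisation data to a stateless V4L2 decoder driver through
 * the V4L2_CID_STATELESS_MPEG2_* controls and the media request API.
 *
 * It is registered per device with a generated name ("v4l2sl%smpeg2dec",
 * see gst_v4l2_codec_mpeg2_dec_register() below); assuming the default
 * device yields "v4l2slmpeg2dec", a typical pipeline would look like:
 *
 *   gst-launch-1.0 filesrc location=test.mpg ! mpegpsdemux ! \
 *       mpegvideoparse ! v4l2slmpeg2dec ! videoconvert ! autovideosink
 */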
21 #ifdef HAVE_CONFIG_H
22 #include <config.h>
23 #endif
24 
25 #include "gstv4l2codecallocator.h"
26 #include "gstv4l2codecmpeg2dec.h"
27 #include "gstv4l2codecpool.h"
28 #include "gstv4l2format.h"
29 #include "linux/v4l2-controls.h"
30 
31 #define KERNEL_VERSION(a,b,c) (((a) << 16) + ((b) << 8) + (c))
32 
33 #define V4L2_MIN_KERNEL_VER_MAJOR 5
34 #define V4L2_MIN_KERNEL_VER_MINOR 14
35 #define V4L2_MIN_KERNEL_VERSION \
36     KERNEL_VERSION(V4L2_MIN_KERNEL_VER_MAJOR, V4L2_MIN_KERNEL_VER_MINOR, 0)
37 
38 #define MPEG2_BITDEPTH 8
39 
40 GST_DEBUG_CATEGORY_STATIC (v4l2_mpeg2dec_debug);
41 #define GST_CAT_DEFAULT v4l2_mpeg2dec_debug
42 
43 enum
44 {
45   PROP_0,
46   PROP_LAST = PROP_0
47 };
48 
49 static GstStaticPadTemplate sink_template =
50 GST_STATIC_PAD_TEMPLATE (GST_VIDEO_DECODER_SINK_NAME,
51     GST_PAD_SINK, GST_PAD_ALWAYS,
52     GST_STATIC_CAPS ("video/mpeg, "
53         "systemstream=(boolean) false, "
54         "mpegversion=(int) 2, " "profile=(string) {main, simple} "));
55 
56 static GstStaticPadTemplate src_template =
57 GST_STATIC_PAD_TEMPLATE (GST_VIDEO_DECODER_SRC_NAME,
58     GST_PAD_SRC, GST_PAD_ALWAYS,
59     GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_V4L2_DEFAULT_VIDEO_FORMATS)));
60 
61 struct _GstV4l2CodecMpeg2Dec
62 {
63   GstMpeg2Decoder parent;
64 
65   GstV4l2Decoder *decoder;
66   GstVideoCodecState *output_state;
67   GstVideoInfo vinfo;
68 
69   guint16 width;
70   guint16 height;
71   guint chroma_format;
72   gboolean interlaced;
73   GstMpegVideoProfile profile;
74   guint16 vbv_buffer_size;
75   gboolean need_sequence;
76   gboolean need_quantiser;
77 
78   struct v4l2_ctrl_mpeg2_sequence v4l2_sequence;
79   struct v4l2_ctrl_mpeg2_picture v4l2_picture;
80   struct v4l2_ctrl_mpeg2_quantisation v4l2_quantisation;
81 
82   GstV4l2CodecAllocator *sink_allocator;
83   GstV4l2CodecAllocator *src_allocator;
84   GstV4l2CodecPool *src_pool;
85   gint min_pool_size;
86   gboolean has_videometa;
87   gboolean need_negotiation;
88 
89   GstMemory *bitstream;
90   GstMapInfo bitstream_map;
91 
92   gboolean copy_frames;
93 };
94 
95 G_DEFINE_ABSTRACT_TYPE (GstV4l2CodecMpeg2Dec, gst_v4l2_codec_mpeg2_dec,
96     GST_TYPE_MPEG2_DECODER);
97 
98 #define parent_class gst_v4l2_codec_mpeg2_dec_parent_class
99 
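/* Tell the base class how many frames we would like to keep in flight.
 * Live pipelines use no render delay to keep latency low; otherwise a
 * delay of one frame lets decode requests be pipelined. The same value
 * is pushed down to the GstV4l2Decoder helper. */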
100 static guint
101 gst_v4l2_codec_mpeg2_dec_get_preferred_output_delay (GstMpeg2Decoder * decoder,
102     gboolean is_live)
103 {
104   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
105   guint delay;
106 
107   if (is_live)
108     delay = 0;
109   else
110     /* Just one for now, perhaps we can make this configurable in the future. */
111     delay = 1;
112 
113   gst_v4l2_decoder_set_render_delay (self->decoder, delay);
114 
115   return delay;
116 }
117 
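/* Opening the device also checks that the kernel is at least 5.14, the
 * release where the stateless MPEG2 controls left the staging API. */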
118 static gboolean
119 gst_v4l2_codec_mpeg2_dec_open (GstVideoDecoder * decoder)
120 {
121   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
122   guint version;
123 
124   if (!gst_v4l2_decoder_open (self->decoder)) {
125     GST_ELEMENT_ERROR (self, RESOURCE, OPEN_READ_WRITE,
126         ("Failed to open mpeg2 decoder"),
127         ("gst_v4l2_decoder_open() failed: %s", g_strerror (errno)));
128     return FALSE;
129   }
130 
131   version = gst_v4l2_decoder_get_version (self->decoder);
132   if (version < V4L2_MIN_KERNEL_VERSION) {
133     GST_ERROR_OBJECT (self,
134         "V4L2 API v%u.%u too old, at least v%u.%u required",
135         (version >> 16) & 0xff, (version >> 8) & 0xff,
136         V4L2_MIN_KERNEL_VER_MAJOR, V4L2_MIN_KERNEL_VER_MINOR);
137 
138     gst_v4l2_decoder_close (self->decoder);
139     return FALSE;
140   }
141 
142   return TRUE;
143 }
144 
145 static gboolean
146 gst_v4l2_codec_mpeg2_dec_close (GstVideoDecoder * decoder)
147 {
148   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
149   return gst_v4l2_decoder_close (self->decoder);
150 }
151 
152 static void
153 gst_v4l2_codec_mpeg2_dec_reset_allocation (GstV4l2CodecMpeg2Dec * self)
154 {
155   if (self->sink_allocator) {
156     gst_v4l2_codec_allocator_detach (self->sink_allocator);
157     g_clear_object (&self->sink_allocator);
158   }
159 
160   if (self->src_allocator) {
161     gst_v4l2_codec_allocator_detach (self->src_allocator);
162     g_clear_object (&self->src_allocator);
163     g_clear_object (&self->src_pool);
164   }
165 }
166 
167 static gboolean
168 gst_v4l2_codec_mpeg2_dec_stop (GstVideoDecoder * decoder)
169 {
170   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
171 
172   gst_v4l2_decoder_streamoff (self->decoder, GST_PAD_SINK);
173   gst_v4l2_decoder_streamoff (self->decoder, GST_PAD_SRC);
174 
175   gst_v4l2_codec_mpeg2_dec_reset_allocation (self);
176 
177   if (self->output_state)
178     gst_video_codec_state_unref (self->output_state);
179   self->output_state = NULL;
180 
181   return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
182 }
183 
184 static gint
185 get_pixel_bitdepth (GstV4l2CodecMpeg2Dec * self)
186 {
187   gint depth;
188 
189   switch (self->chroma_format) {
190     case 0:
191       /* 4:0:0 */
192       depth = MPEG2_BITDEPTH;
193       break;
194     case 1:
195       /* 4:2:0 */
196       depth = MPEG2_BITDEPTH + MPEG2_BITDEPTH / 2;
197       break;
198     case 2:
199       /* 4:2:2 */
200       depth = 2 * MPEG2_BITDEPTH;
201       break;
202     case 3:
203       /* 4:4:4 */
204       depth = 3 * MPEG2_BITDEPTH;
205       break;
206     default:
207       GST_WARNING_OBJECT (self, "Unsupported chroma format %i",
208           self->chroma_format);
209       depth = 0;
210       break;
211   }
212 
213   return depth;
214 }
215 
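/* (Re)negotiate the decoder:
 *  - stop both V4L2 queues and drop the previous allocators,
 *  - set the OUTPUT (bitstream) format to V4L2_PIX_FMT_MPEG2_SLICE,
 *  - probe the driver with the current sequence/quantisation controls so
 *    unsupported streams are rejected early,
 *  - intersect the driver's CAPTURE formats with downstream caps and pick
 *    the output video format before streaming on again. */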
216 static gboolean
217 gst_v4l2_codec_mpeg2_dec_negotiate (GstVideoDecoder * decoder)
218 {
219   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
220   GstMpeg2Decoder *mpeg2dec = GST_MPEG2_DECODER (decoder);
221   /* *INDENT-OFF* */
222   struct v4l2_ext_control control[] = {
223     {
224       .id = V4L2_CID_STATELESS_MPEG2_SEQUENCE,
225       .ptr = &self->v4l2_sequence,
226       .size = sizeof(self->v4l2_sequence),
227     },
228     {
229       .id = V4L2_CID_STATELESS_MPEG2_QUANTISATION,
230       .ptr = &self->v4l2_quantisation,
231       .size = sizeof(self->v4l2_quantisation),
232     },
233   };
234 
235   /* *INDENT-ON* */
236   GstCaps *filter, *caps;
237 
238   /* Ignore downstream renegotiation request. */
239   if (!self->need_negotiation)
240     return TRUE;
241   self->need_negotiation = FALSE;
242 
243   GST_DEBUG_OBJECT (self, "Negotiate");
244 
245   gst_v4l2_decoder_streamoff (self->decoder, GST_PAD_SINK);
246   gst_v4l2_decoder_streamoff (self->decoder, GST_PAD_SRC);
247 
248   gst_v4l2_codec_mpeg2_dec_reset_allocation (self);
249 
250   if (!gst_v4l2_decoder_set_sink_fmt (self->decoder, V4L2_PIX_FMT_MPEG2_SLICE,
251           self->width, self->height, get_pixel_bitdepth (self))) {
252     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
253         ("Failed to configure mpeg2 decoder"),
254         ("gst_v4l2_decoder_set_sink_fmt() failed: %s", g_strerror (errno)));
255     gst_v4l2_decoder_close (self->decoder);
256     return FALSE;
257   }
258 
259   if (!gst_v4l2_decoder_set_controls (self->decoder, NULL, control,
260           G_N_ELEMENTS (control))) {
261     GST_ELEMENT_ERROR (decoder, RESOURCE, WRITE,
262         ("Driver does not support the selected stream."), (NULL));
263     return FALSE;
264   }
265 
266   filter = gst_v4l2_decoder_enum_src_formats (self->decoder);
267   if (!filter) {
268     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
269         ("No supported decoder output formats"), (NULL));
270     return FALSE;
271   }
272   GST_DEBUG_OBJECT (self, "Supported output formats: %" GST_PTR_FORMAT, filter);
273 
274   caps = gst_pad_peer_query_caps (decoder->srcpad, filter);
275   gst_caps_unref (filter);
276   GST_DEBUG_OBJECT (self, "Peer supported formats: %" GST_PTR_FORMAT, caps);
277 
278   if (!gst_v4l2_decoder_select_src_format (self->decoder, caps, &self->vinfo)) {
279     GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
280         ("Unsupported bitdepth/chroma format"),
281         ("No support for %ux%u chroma IDC %i", self->width,
282             self->height, self->chroma_format));
283     gst_caps_unref (caps);
284     return FALSE;
285   }
286   gst_caps_unref (caps);
287 
288   if (self->output_state)
289     gst_video_codec_state_unref (self->output_state);
290 
291   self->output_state =
292       gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self),
293       self->vinfo.finfo->format, self->width,
294       self->height, mpeg2dec->input_state);
295 
296   if (self->interlaced)
297     self->output_state->info.interlace_mode =
298         GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
299 
300   self->output_state->caps = gst_video_info_to_caps (&self->output_state->info);
301 
302   if (GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder)) {
303     if (!gst_v4l2_decoder_streamon (self->decoder, GST_PAD_SINK)) {
304       GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
305           ("Could not enable the decoder driver."),
306           ("VIDIOC_STREAMON(SINK) failed: %s", g_strerror (errno)));
307       return FALSE;
308     }
309 
310     if (!gst_v4l2_decoder_streamon (self->decoder, GST_PAD_SRC)) {
311       GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
312           ("Could not enable the decoder driver."),
313           ("VIDIOC_STREAMON(SRC) failed: %s", g_strerror (errno)));
314       return FALSE;
315     }
316 
317     return TRUE;
318   }
319 
320   return FALSE;
321 }
322 
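/* Size the internal allocators: the bitstream (sink) queue only needs one
 * buffer per in-flight request, while the picture (source) pool accounts
 * for the fixed MPEG2 DPB of 2, whatever downstream requested, plus a
 * margin of 4 buffers. */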
323 static gboolean
324 gst_v4l2_codec_mpeg2_dec_decide_allocation (GstVideoDecoder * decoder,
325     GstQuery * query)
326 {
327   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
328   guint min = 0, num_bitstream;
329 
330   self->has_videometa = gst_query_find_allocation_meta (query,
331       GST_VIDEO_META_API_TYPE, NULL);
332 
333   g_clear_object (&self->src_pool);
334   g_clear_object (&self->src_allocator);
335 
336   if (gst_query_get_n_allocation_pools (query) > 0)
337     gst_query_parse_nth_allocation_pool (query, 0, NULL, NULL, &min, NULL);
338 
339   min = MAX (2, min);
340   /* note the dpb size is fixed at 2 */
341   num_bitstream = 1 +
342       MAX (1, gst_v4l2_decoder_get_render_delay (self->decoder));
343 
344   self->sink_allocator = gst_v4l2_codec_allocator_new (self->decoder,
345       GST_PAD_SINK, num_bitstream);
346   self->src_allocator = gst_v4l2_codec_allocator_new (self->decoder,
347       GST_PAD_SRC, self->min_pool_size + min + 4);
348   self->src_pool = gst_v4l2_codec_pool_new (self->src_allocator, &self->vinfo);
349 
350   /* Our buffer pool is internal, we will let the base class create a video
351    * pool, and use it if we are running out of buffers or if downstream does
352    * not support GstVideoMeta */
353   return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation
354       (decoder, query);
355 }
356 
357 
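/* new_sequence() is called by the base class for every sequence header.
 * It merges the optional sequence extension into the effective width,
 * height and vbv_buffer_size, tracks interlacing, resolution and profile
 * changes to decide whether renegotiation is needed, and pre-fills the
 * v4l2_ctrl_mpeg2_sequence and quantisation matrices that will be sent
 * with the next request. */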
358 static GstFlowReturn
359 gst_v4l2_codec_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
360     const GstMpegVideoSequenceHdr * seq,
361     const GstMpegVideoSequenceExt * seq_ext,
362     const GstMpegVideoSequenceDisplayExt * seq_display_ext,
363     const GstMpegVideoSequenceScalableExt * seq_scalable_ext)
364 {
365   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
366   gboolean negotiation_needed = FALSE;
367   gboolean interlaced;
368   guint16 width;
369   guint16 height;
370   guint16 vbv_buffer_size;
371   GstMpegVideoProfile mpeg_profile;
372 
373   GST_LOG_OBJECT (self, "New sequence");
374 
375   interlaced = seq_ext ? !seq_ext->progressive : FALSE;
376   if (self->interlaced != interlaced) {
377     GST_INFO_OBJECT (self, "interlaced sequence change");
378     self->interlaced = interlaced;
379     negotiation_needed = TRUE;
380   }
381 
382   width = seq->width;
383   height = seq->height;
384   vbv_buffer_size = seq->vbv_buffer_size_value;
385   if (seq_ext) {
386     width = (width & 0x0fff) | ((guint32) seq_ext->horiz_size_ext << 12);
387     height = (height & 0x0fff) | ((guint32) seq_ext->vert_size_ext << 12);
388     vbv_buffer_size = (vbv_buffer_size & 0x03ff) | ((guint32)
389         seq_ext->vbv_buffer_size_extension << 10);
390   }
391 
392   if (self->width != width || self->height != height) {
393     GST_INFO_OBJECT (self, "resolution change %dx%d -> %dx%d",
394         self->width, self->height, width, height);
395     self->width = width;
396     self->height = height;
397     negotiation_needed = TRUE;
398   }
399 
400   if (self->vbv_buffer_size != vbv_buffer_size) {
401     GST_INFO_OBJECT (self, "vbv buffer size change %d -> %d",
402         self->vbv_buffer_size, vbv_buffer_size);
403     self->vbv_buffer_size = vbv_buffer_size;
404     negotiation_needed = TRUE;
405   }
406 
407   mpeg_profile = GST_MPEG_VIDEO_PROFILE_MAIN;
408   if (seq_ext)
409     mpeg_profile = seq_ext->profile;
410 
411   if (mpeg_profile != GST_MPEG_VIDEO_PROFILE_MAIN &&
412       mpeg_profile != GST_MPEG_VIDEO_PROFILE_SIMPLE) {
413     GST_ERROR_OBJECT (self, "Cannot support profile %d", mpeg_profile);
414     return GST_FLOW_ERROR;
415   }
416 
417   if (self->profile != mpeg_profile) {
418     GST_INFO_OBJECT (self, "Profile change %d -> %d",
419         self->profile, mpeg_profile);
420     self->profile = mpeg_profile;
421     self->need_negotiation = TRUE;
422   }
423 
424   if (self->vinfo.finfo->format == GST_VIDEO_FORMAT_UNKNOWN)
425     negotiation_needed = TRUE;
426 
427   /* copy quantiser from the sequence header,
428    * if none is provided this will copy the default ones
429    * added by the parser
430    */
431   memcpy (self->v4l2_quantisation.intra_quantiser_matrix,
432       seq->intra_quantizer_matrix,
433       sizeof (self->v4l2_quantisation.intra_quantiser_matrix));
434   memcpy (self->v4l2_quantisation.non_intra_quantiser_matrix,
435       seq->non_intra_quantizer_matrix,
436       sizeof (self->v4l2_quantisation.non_intra_quantiser_matrix));
437 
438   /* *INDENT-OFF* */
439   self->v4l2_sequence = (struct v4l2_ctrl_mpeg2_sequence) {
440     .horizontal_size = self->width,
441     .vertical_size = self->height,
442     .vbv_buffer_size = self->vbv_buffer_size * 16 * 1024,
443     .profile_and_level_indication =
444         seq_ext ? (seq_ext->profile << 4) | (seq_ext->
445         level << 1) | seq_ext->profile_level_escape_bit : 0,
446     .chroma_format = seq_ext ? seq_ext->chroma_format : 0,
447     .flags = (!seq_ext || seq_ext->progressive) ? V4L2_MPEG2_SEQ_FLAG_PROGRESSIVE : 0,
448   };
449   /* *INDENT-ON* */
450 
451   if (negotiation_needed) {
452     self->need_negotiation = TRUE;
453     if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
454       GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
455       return GST_FLOW_ERROR;
456     }
457   } else {
458     self->need_sequence = TRUE;
459     self->need_quantiser = TRUE;
460   }
461 
462   /* Check if we can zero-copy buffers */
463   if (!self->has_videometa) {
464     GstVideoInfo ref_vinfo;
465     gint i;
466 
467     gst_video_info_set_format (&ref_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
468         self->width, self->height);
469 
470     for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->vinfo); i++) {
471       if (self->vinfo.stride[i] != ref_vinfo.stride[i] ||
472           self->vinfo.offset[i] != ref_vinfo.offset[i]) {
473         GST_WARNING_OBJECT (self,
474             "GstVideoMeta support required, copying frames.");
475         self->copy_frames = TRUE;
476         break;
477       }
478     }
479   } else {
480     self->copy_frames = FALSE;
481   }
482 
483   return GST_FLOW_OK;
484 }
485 
486 static gboolean
487 gst_v4l2_codec_mpeg2_dec_ensure_bitstream (GstV4l2CodecMpeg2Dec * self)
488 {
489   if (self->bitstream)
490     goto done;
491 
492   self->bitstream = gst_v4l2_codec_allocator_alloc (self->sink_allocator);
493 
494   if (!self->bitstream) {
495     GST_ELEMENT_ERROR (self, RESOURCE, NO_SPACE_LEFT,
496         ("Not enough memory to decode mpeg2 stream."), (NULL));
497     return FALSE;
498   }
499 
500   if (!gst_memory_map (self->bitstream, &self->bitstream_map, GST_MAP_WRITE)) {
501     GST_ELEMENT_ERROR (self, RESOURCE, WRITE,
502         ("Could not access bitstream memory for writing"), (NULL));
503     g_clear_pointer (&self->bitstream, gst_memory_unref);
504     return FALSE;
505   }
506 
507 done:
508   /* We use this field to track how much we have written */
509   self->bitstream_map.size = 0;
510 
511   return TRUE;
512 }
513 
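/* Small helpers translating the GStreamer picture coding type and picture
 * structure enums into their V4L2 stateless MPEG2 counterparts. */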
514 static inline void
515 _parse_picture_coding_type (struct v4l2_ctrl_mpeg2_picture *v4l2_picture,
516     GstMpeg2Picture * mpeg2_picture)
517 {
518   switch (mpeg2_picture->type) {
519     case GST_MPEG_VIDEO_PICTURE_TYPE_I:
520       v4l2_picture->picture_coding_type = V4L2_MPEG2_PIC_CODING_TYPE_I;
521       break;
522     case GST_MPEG_VIDEO_PICTURE_TYPE_P:
523       v4l2_picture->picture_coding_type = V4L2_MPEG2_PIC_CODING_TYPE_P;
524       break;
525     case GST_MPEG_VIDEO_PICTURE_TYPE_B:
526       v4l2_picture->picture_coding_type = V4L2_MPEG2_PIC_CODING_TYPE_B;
527       break;
528     case GST_MPEG_VIDEO_PICTURE_TYPE_D:
529       v4l2_picture->picture_coding_type = V4L2_MPEG2_PIC_CODING_TYPE_D;
530       break;
531   }
532 }
533 
534 static inline void
535 _parse_picture_structure (struct v4l2_ctrl_mpeg2_picture *v4l2_picture,
536     GstMpeg2Slice * slice)
537 {
538   if (!slice->pic_ext)
539     return;
540   switch (slice->pic_ext->picture_structure) {
541     case GST_MPEG_VIDEO_PICTURE_STRUCTURE_TOP_FIELD:
542       v4l2_picture->picture_structure = V4L2_MPEG2_PIC_TOP_FIELD;
543       break;
544     case GST_MPEG_VIDEO_PICTURE_STRUCTURE_BOTTOM_FIELD:
545       v4l2_picture->picture_structure = V4L2_MPEG2_PIC_BOTTOM_FIELD;
546       break;
547     case GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME:
548       v4l2_picture->picture_structure = V4L2_MPEG2_PIC_FRAME;
549       break;
550   }
551 }
552 
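/* start_picture() prepares the per-picture state: it fills
 * v4l2_ctrl_mpeg2_picture from the slice's picture coding extension and
 * references the forward/backward pictures through their
 * system_frame_number * 1000 timestamps (presumably matching the
 * timestamps given to the CAPTURE buffers by the GstV4l2Decoder helper),
 * then refreshes the quantisation matrices when the stream reloads them
 * (see ISO/IEC 13818-2 section 6.1.1.6). */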
553 static GstFlowReturn
554 gst_v4l2_codec_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
555     GstMpeg2Picture * picture, GstMpeg2Slice * slice,
556     GstMpeg2Picture * prev_picture, GstMpeg2Picture * next_picture)
557 {
558   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
559 
560   /* FIXME base class should not call us if negotiation failed */
561   if (!self->sink_allocator)
562     return GST_FLOW_ERROR;
563 
564   if (!gst_v4l2_codec_mpeg2_dec_ensure_bitstream (self))
565     return GST_FLOW_ERROR;
566 
567 
568   /* *INDENT-OFF* */
569   self->v4l2_picture = (struct v4l2_ctrl_mpeg2_picture) {
570     .backward_ref_ts = next_picture ? next_picture->system_frame_number * 1000 : GST_CLOCK_TIME_NONE,
571     .forward_ref_ts = prev_picture ? prev_picture->system_frame_number * 1000 : GST_CLOCK_TIME_NONE,
572     .intra_dc_precision = slice->pic_ext ? slice->pic_ext->intra_dc_precision : 0,
573     .flags = (slice->pic_ext && slice->pic_ext->top_field_first ? V4L2_MPEG2_PIC_FLAG_TOP_FIELD_FIRST : 0) |
574              (slice->pic_ext && slice->pic_ext->frame_pred_frame_dct ? V4L2_MPEG2_PIC_FLAG_FRAME_PRED_DCT : 0 ) |
575              (slice->pic_ext && slice->pic_ext->concealment_motion_vectors ? V4L2_MPEG2_PIC_FLAG_CONCEALMENT_MV : 0) |
576              (slice->pic_ext && slice->pic_ext->q_scale_type ? V4L2_MPEG2_PIC_FLAG_Q_SCALE_TYPE : 0) |
577              (slice->pic_ext && slice->pic_ext->intra_vlc_format ? V4L2_MPEG2_PIC_FLAG_INTRA_VLC : 0) |
578              (slice->pic_ext && slice->pic_ext->alternate_scan ? V4L2_MPEG2_PIC_FLAG_ALT_SCAN : 0) |
579              (slice->pic_ext && slice->pic_ext->repeat_first_field ? V4L2_MPEG2_PIC_FLAG_REPEAT_FIRST : 0) |
580              (slice->pic_ext && slice->pic_ext->progressive_frame ? V4L2_MPEG2_PIC_FLAG_PROGRESSIVE : 0),
581   };
582   /* *INDENT-ON* */
583 
584   _parse_picture_coding_type (&self->v4l2_picture, picture);
585   _parse_picture_structure (&self->v4l2_picture, slice);
586 
587   /* slices share pic_ext and quant_matrix for the picture which might be there or not */
588   if (slice->pic_ext)
589     memcpy (&self->v4l2_picture.f_code, slice->pic_ext->f_code,
590         sizeof (self->v4l2_picture.f_code));
591 
592   /* overwrite the sequence ones if needed, see 6.1.1.6 for reference */
593   if (slice->quant_matrix) {
594     if (slice->quant_matrix->load_intra_quantiser_matrix)
595       memcpy (self->v4l2_quantisation.intra_quantiser_matrix,
596           slice->quant_matrix->intra_quantiser_matrix,
597           sizeof (self->v4l2_quantisation.intra_quantiser_matrix));
598     if (slice->quant_matrix->load_non_intra_quantiser_matrix)
599       memcpy (self->v4l2_quantisation.non_intra_quantiser_matrix,
600           slice->quant_matrix->non_intra_quantiser_matrix,
601           sizeof (self->v4l2_quantisation.non_intra_quantiser_matrix));
602     if (slice->quant_matrix->load_chroma_intra_quantiser_matrix)
603       memcpy (self->v4l2_quantisation.chroma_intra_quantiser_matrix,
604           slice->quant_matrix->chroma_intra_quantiser_matrix,
605           sizeof (self->v4l2_quantisation.chroma_intra_quantiser_matrix));
606     if (slice->quant_matrix->load_chroma_non_intra_quantiser_matrix)
607       memcpy (self->v4l2_quantisation.chroma_non_intra_quantiser_matrix,
608           slice->quant_matrix->chroma_non_intra_quantiser_matrix,
609           sizeof (self->v4l2_quantisation.chroma_non_intra_quantiser_matrix));
610 
611     self->need_quantiser |= (slice->quant_matrix->load_intra_quantiser_matrix ||
612         slice->quant_matrix->load_non_intra_quantiser_matrix ||
613         slice->quant_matrix->load_chroma_intra_quantiser_matrix ||
614         slice->quant_matrix->load_chroma_non_intra_quantiser_matrix);
615   }
616 
617   return GST_FLOW_OK;
618 }
619 
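/* Fallback used when downstream cannot handle GstVideoMeta and the driver's
 * strides/offsets differ from the standard layout: the decoded frame is
 * copied into a buffer allocated from the downstream-negotiated pool. */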
620 static gboolean
621 gst_v4l2_codec_mpeg2_dec_copy_output_buffer (GstV4l2CodecMpeg2Dec * self,
622     GstVideoCodecFrame * codec_frame)
623 {
624   GstVideoFrame src_frame;
625   GstVideoFrame dest_frame;
626   GstVideoInfo dest_vinfo;
627   GstBuffer *buffer;
628 
629   gst_video_info_set_format (&dest_vinfo, GST_VIDEO_INFO_FORMAT (&self->vinfo),
630       self->width, self->height);
631 
632   buffer = gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
633   if (!buffer)
634     goto fail;
635 
636   if (!gst_video_frame_map (&src_frame, &self->vinfo,
637           codec_frame->output_buffer, GST_MAP_READ))
638     goto fail;
639 
640   if (!gst_video_frame_map (&dest_frame, &dest_vinfo, buffer, GST_MAP_WRITE)) {
641     gst_video_frame_unmap (&src_frame);
642     goto fail;
643   }
644 
645   GST_VIDEO_INFO_WIDTH (&src_frame.info) = self->width;
646   GST_VIDEO_INFO_HEIGHT (&src_frame.info) = self->height;
647 
648   if (!gst_video_frame_copy (&dest_frame, &src_frame)) {
649     gst_video_frame_unmap (&src_frame);
650     gst_video_frame_unmap (&dest_frame);
651     goto fail;
652   }
653 
654   gst_video_frame_unmap (&src_frame);
655   gst_video_frame_unmap (&dest_frame);
656   gst_buffer_replace (&codec_frame->output_buffer, buffer);
657   gst_buffer_unref (buffer);
658 
659   return TRUE;
660 
661 fail:
662   GST_ERROR_OBJECT (self, "Failed to copy output buffer.");
663   return FALSE;
664 }
665 
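/* output_picture() blocks until the decode request completes, maps
 * failures to element errors, keeps the decoded buffer alive for as long
 * as the picture may be used as a reference, optionally copies it (see
 * above) and finally pushes the frame downstream. */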
666 static GstFlowReturn
667 gst_v4l2_codec_mpeg2_dec_output_picture (GstMpeg2Decoder * decoder,
668     GstVideoCodecFrame * frame, GstMpeg2Picture * picture)
669 {
670   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
671   GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
672   GstV4l2Request *request = gst_mpeg2_picture_get_user_data (picture);
673   gint ret;
674 
675   GST_DEBUG_OBJECT (self, "Output picture %u", picture->system_frame_number);
676 
677   ret = gst_v4l2_request_set_done (request);
678   if (ret == 0) {
679     GST_ELEMENT_ERROR (self, STREAM, DECODE,
680         ("Decoding frame %u took too long", picture->system_frame_number),
681         (NULL));
682     goto error;
683   } else if (ret < 0) {
684     GST_ELEMENT_ERROR (self, STREAM, DECODE,
685         ("Decoding request failed: %s", g_strerror (errno)), (NULL));
686     goto error;
687   }
688   g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);
689 
690   if (gst_v4l2_request_failed (request)) {
691     GST_ELEMENT_ERROR (self, STREAM, DECODE,
692         ("Failed to decode frame %u", picture->system_frame_number), (NULL));
693     goto error;
694   }
695 
696   /* Hold on reference buffers for the rest of the picture lifetime */
697   gst_mpeg2_picture_set_user_data (picture,
698       gst_buffer_ref (frame->output_buffer), (GDestroyNotify) gst_buffer_unref);
699 
700   if (self->copy_frames)
701     gst_v4l2_codec_mpeg2_dec_copy_output_buffer (self, frame);
702 
703   gst_mpeg2_picture_unref (picture);
704 
705   return gst_video_decoder_finish_frame (vdec, frame);
706 
707 error:
708   gst_video_decoder_drop_frame (vdec, frame);
709   gst_mpeg2_picture_unref (picture);
710 
711   return GST_FLOW_ERROR;
712 }
713 
714 static void
715 gst_v4l2_codec_mpeg2_dec_reset_picture (GstV4l2CodecMpeg2Dec * self)
716 {
717   if (self->bitstream) {
718     if (self->bitstream_map.memory)
719       gst_memory_unmap (self->bitstream, &self->bitstream_map);
720     g_clear_pointer (&self->bitstream, gst_memory_unref);
721     self->bitstream_map = (GstMapInfo) GST_MAP_INFO_INIT;
722   }
723 }
724 
725 static gboolean
726 gst_v4l2_codec_mpeg2_dec_ensure_output_buffer (GstV4l2CodecMpeg2Dec * self,
727     GstVideoCodecFrame * frame)
728 {
729   GstBuffer *buffer;
730   GstFlowReturn flow_ret;
731 
732   if (frame->output_buffer)
733     return TRUE;
734 
735   flow_ret = gst_buffer_pool_acquire_buffer (GST_BUFFER_POOL (self->src_pool),
736       &buffer, NULL);
737   if (flow_ret != GST_FLOW_OK) {
738     if (flow_ret == GST_FLOW_FLUSHING)
739       GST_DEBUG_OBJECT (self, "Frame decoding aborted, we are flushing.");
740     else
741       GST_ELEMENT_ERROR (self, RESOURCE, WRITE,
742           ("No more picture buffer available."), (NULL));
743     return FALSE;
744   }
745 
746   frame->output_buffer = buffer;
747   return TRUE;
748 }
749 
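/* Queue one decode request for the accumulated bitstream of a picture.
 * The second field of an interlaced picture is submitted as a sub-request
 * reusing the first field's request, while a first field keeps its CAPTURE
 * buffer held (V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF). Sequence and
 * quantisation controls are only attached when they have changed. */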
750 static gboolean
751 gst_v4l2_codec_mpeg2_dec_submit_bitstream (GstV4l2CodecMpeg2Dec * self,
752     GstMpeg2Picture * picture)
753 {
754   GstV4l2Request *prev_request = NULL, *request = NULL;
755   gsize bytesused;
756   gboolean ret = FALSE;
757   guint count = 0;
758   guint flags = 0;
759 
760   /* *INDENT-OFF* */
761   /* Reserve space for controls */
762   struct v4l2_ext_control control[] = {
763     { }, /* sequence */
764     { }, /* picture */
765     { }, /* slice */
766     { }, /* quantization */
767   };
768   /* *INDENT-ON* */
769 
770   if (picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME) {
771     if (picture->first_field)
772       prev_request = gst_mpeg2_picture_get_user_data (picture->first_field);
773     else
774       flags = V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF;
775   }
776 
777   bytesused = self->bitstream_map.size;
778   gst_memory_unmap (self->bitstream, &self->bitstream_map);
779   self->bitstream_map = (GstMapInfo) GST_MAP_INFO_INIT;
780   gst_memory_resize (self->bitstream, 0, bytesused);
781 
782   if (prev_request) {
783     request = gst_v4l2_decoder_alloc_sub_request (self->decoder, prev_request,
784         self->bitstream);
785   } else {
786     GstVideoCodecFrame *frame;
787 
788     frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
789         picture->system_frame_number);
790     g_return_val_if_fail (frame, FALSE);
791 
792     if (!gst_v4l2_codec_mpeg2_dec_ensure_output_buffer (self, frame))
793       goto done;
794 
795     request = gst_v4l2_decoder_alloc_request (self->decoder,
796         picture->system_frame_number, self->bitstream, frame->output_buffer);
797 
798     gst_video_codec_frame_unref (frame);
799   }
800 
801   if (!request) {
802     GST_ELEMENT_ERROR (self, RESOURCE, NO_SPACE_LEFT,
803         ("Failed to allocate a media request object."), (NULL));
804     goto done;
805   }
806 
807   if (self->need_sequence) {
808     control[count].id = V4L2_CID_STATELESS_MPEG2_SEQUENCE;
809     control[count].ptr = &self->v4l2_sequence;
810     control[count].size = sizeof (self->v4l2_sequence);
811     count++;
812     self->need_sequence = FALSE;
813   }
814 
815   control[count].id = V4L2_CID_STATELESS_MPEG2_PICTURE;
816   control[count].ptr = &self->v4l2_picture;
817   control[count].size = sizeof (self->v4l2_picture);
818   count++;
819 
820   if (self->need_quantiser) {
821     control[count].id = V4L2_CID_STATELESS_MPEG2_QUANTISATION;
822     control[count].ptr = &self->v4l2_quantisation;
823     control[count].size = sizeof (self->v4l2_quantisation);
824     count++;
825     self->need_quantiser = FALSE;
826   }
827 
828   if (!gst_v4l2_decoder_set_controls (self->decoder, request, control, count)) {
829     GST_ELEMENT_ERROR (self, RESOURCE, WRITE,
830         ("Driver did not accept the bitstream parameters."), (NULL));
831     goto done;
832   }
833 
834   if (!gst_v4l2_request_queue (request, flags)) {
835     GST_ELEMENT_ERROR (self, RESOURCE, WRITE,
836         ("Driver did not accept the decode request."), (NULL));
837     goto done;
838   }
839 
840   gst_mpeg2_picture_set_user_data (picture, g_steal_pointer (&request),
841       (GDestroyNotify) gst_v4l2_request_unref);
842 
843   ret = TRUE;
844 
845 done:
846   if (request)
847     gst_v4l2_request_unref (request);
848 
849   gst_v4l2_codec_mpeg2_dec_reset_picture (self);
850 
851   return ret;
852 }
853 
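/* Slices are not submitted individually: each slice is appended to the
 * picture's bitstream buffer and the whole picture is handed to the
 * driver in end_picture(). */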
854 static GstFlowReturn
855 gst_v4l2_codec_mpeg2_dec_decode_slice (GstMpeg2Decoder * decoder,
856     GstMpeg2Picture * picture, GstMpeg2Slice * slice)
857 {
858   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
859   const gsize slice_size = slice->size;
860   const gsize slice_offset = slice->sc_offset;
861   const guint8 *slice_ptr = slice->packet.data + slice_offset;
862   guint8 *bitstream_ptr = self->bitstream_map.data + self->bitstream_map.size;
863 
864   if (self->bitstream_map.size + slice_size > self->bitstream_map.maxsize) {
865     GST_ELEMENT_ERROR (decoder, RESOURCE, NO_SPACE_LEFT,
866         ("Not enough space for slice."), (NULL));
867     gst_v4l2_codec_mpeg2_dec_reset_picture (self);
868     return GST_FLOW_ERROR;
869   }
870 
871   memcpy (bitstream_ptr, slice_ptr, slice_size);
872   self->bitstream_map.size += slice_size;
873 
874   return GST_FLOW_OK;
875 }
876 
877 static GstFlowReturn
878 gst_v4l2_codec_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
879     GstMpeg2Picture * picture)
880 {
881   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
882   /* FIXME might need to make this lazier in case we get an unpaired field */
883   if (!gst_v4l2_codec_mpeg2_dec_submit_bitstream (self, picture))
884     return GST_FLOW_ERROR;
885 
886   return GST_FLOW_OK;
887 }
888 
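/* Flush handling: FLUSH_START marks both allocators as flushing so any
 * thread waiting for a free buffer wakes up; flush() then asks the
 * GstV4l2Decoder helper to flush pending requests and rearms the
 * allocators. */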
889 static void
890 gst_v4l2_codec_mpeg2_dec_set_flushing (GstV4l2CodecMpeg2Dec * self,
891     gboolean flushing)
892 {
893   if (self->sink_allocator)
894     gst_v4l2_codec_allocator_set_flushing (self->sink_allocator, flushing);
895   if (self->src_allocator)
896     gst_v4l2_codec_allocator_set_flushing (self->src_allocator, flushing);
897 }
898 
899 static gboolean
900 gst_v4l2_codec_mpeg2_dec_flush (GstVideoDecoder * decoder)
901 {
902   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
903 
904   GST_DEBUG_OBJECT (self, "Flushing decoder state.");
905 
906   gst_v4l2_decoder_flush (self->decoder);
907   gst_v4l2_codec_mpeg2_dec_set_flushing (self, FALSE);
908 
909   return GST_VIDEO_DECODER_CLASS (parent_class)->flush (decoder);
910 }
911 
912 static gboolean
913 gst_v4l2_codec_mpeg2_dec_sink_event (GstVideoDecoder * decoder,
914     GstEvent * event)
915 {
916   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (decoder);
917 
918   switch (GST_EVENT_TYPE (event)) {
919     case GST_EVENT_FLUSH_START:
920       GST_DEBUG_OBJECT (self, "flush start");
921       gst_v4l2_codec_mpeg2_dec_set_flushing (self, TRUE);
922       break;
923     default:
924       break;
925   }
926 
927   return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
928 }
929 
930 static GstStateChangeReturn
931 gst_v4l2_codec_mpeg2_dec_change_state (GstElement * element,
932     GstStateChange transition)
933 {
934   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (element);
935 
936   if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
937     gst_v4l2_codec_mpeg2_dec_set_flushing (self, TRUE);
938 
939   return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
940 }
941 
942 static void
943 gst_v4l2_codec_mpeg2_dec_set_property (GObject * object, guint prop_id,
944     const GValue * value, GParamSpec * pspec)
945 {
946   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (object);
947   GObject *dec = G_OBJECT (self->decoder);
948 
949   switch (prop_id) {
950     default:
951       gst_v4l2_decoder_set_property (dec, prop_id - PROP_LAST, value, pspec);
952       break;
953   }
954 }
955 
956 static void
957 gst_v4l2_codec_mpeg2_dec_get_property (GObject * object, guint prop_id,
958     GValue * value, GParamSpec * pspec)
959 {
960   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (object);
961   GObject *dec = G_OBJECT (self->decoder);
962 
963   switch (prop_id) {
964     default:
965       gst_v4l2_decoder_get_property (dec, prop_id - PROP_LAST, value, pspec);
966       break;
967   }
968 }
969 
970 static void
971 gst_v4l2_codec_mpeg2_dec_init (GstV4l2CodecMpeg2Dec * self)
972 {
973 }
974 
975 static void
976 gst_v4l2_codec_mpeg2_dec_subinit (GstV4l2CodecMpeg2Dec * self,
977     GstV4l2CodecMpeg2DecClass * klass)
978 {
979   self->decoder = gst_v4l2_decoder_new (klass->device);
980   gst_video_info_init (&self->vinfo);
981 }
982 
983 static void
984 gst_v4l2_codec_mpeg2_dec_dispose (GObject * object)
985 {
986   GstV4l2CodecMpeg2Dec *self = GST_V4L2_CODEC_MPEG2_DEC (object);
987 
988   g_clear_object (&self->decoder);
989 
990   G_OBJECT_CLASS (parent_class)->dispose (object);
991 }
992 
993 static void
994 gst_v4l2_codec_mpeg2_dec_class_init (GstV4l2CodecMpeg2DecClass * klass)
995 {
996 }
997 
998 static void
999 gst_v4l2_codec_mpeg2_dec_subclass_init (GstV4l2CodecMpeg2DecClass * klass,
1000     GstV4l2CodecDevice * device)
1001 {
1002   GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
1003   GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
1004   GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
1005   GstMpeg2DecoderClass *mpeg2decoder_class = GST_MPEG2_DECODER_CLASS (klass);
1006 
1007   gobject_class->set_property = gst_v4l2_codec_mpeg2_dec_set_property;
1008   gobject_class->get_property = gst_v4l2_codec_mpeg2_dec_get_property;
1009   gobject_class->dispose = gst_v4l2_codec_mpeg2_dec_dispose;
1010 
1011   gst_element_class_set_static_metadata (element_class,
1012       "V4L2 Stateless Mpeg2 Video Decoder",
1013       "Codec/Decoder/Video/Hardware",
1014       "A V4L2 based Mpeg2 video decoder",
1015       "Daniel Almeida <daniel.almeida@collabora.com>");
1016 
1017   gst_element_class_add_static_pad_template (element_class, &sink_template);
1018   gst_element_class_add_static_pad_template (element_class, &src_template);
1019   element_class->change_state =
1020       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_change_state);
1021 
1022   decoder_class->open = GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_open);
1023   decoder_class->close = GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_close);
1024   decoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_stop);
1025   decoder_class->negotiate =
1026       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_negotiate);
1027   decoder_class->decide_allocation =
1028       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_decide_allocation);
1029   decoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_flush);
1030   decoder_class->sink_event =
1031       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_sink_event);
1032 
1033   mpeg2decoder_class->new_sequence =
1034       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_new_sequence);
1035   mpeg2decoder_class->output_picture =
1036       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_output_picture);
1037   mpeg2decoder_class->start_picture =
1038       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_start_picture);
1039   mpeg2decoder_class->decode_slice =
1040       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_decode_slice);
1041   mpeg2decoder_class->end_picture =
1042       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_end_picture);
1043   mpeg2decoder_class->get_preferred_output_delay =
1044       GST_DEBUG_FUNCPTR (gst_v4l2_codec_mpeg2_dec_get_preferred_output_delay);
1045 
1046   klass->device = device;
1047   gst_v4l2_decoder_install_properties (gobject_class, PROP_LAST, device);
1048 }
1049 
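/* Registration entry point called at plugin load for each stateless MPEG2
 * capable device: a dummy 320x240 OUTPUT format is set to probe which
 * CAPTURE formats the driver can produce, and a per-device subclass named
 * "v4l2sl%smpeg2dec" is registered when at least one format is usable. */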
1050 void
1051 gst_v4l2_codec_mpeg2_dec_register (GstPlugin * plugin, GstV4l2Decoder * decoder,
1052     GstV4l2CodecDevice * device, guint rank)
1053 {
1054   GstCaps *src_caps;
1055 
1056   GST_DEBUG_CATEGORY_INIT (v4l2_mpeg2dec_debug, "v4l2codecs-mpeg2dec", 0,
1057       "V4L2 stateless mpeg2 decoder");
1058 
1059   if (!gst_v4l2_decoder_set_sink_fmt (decoder, V4L2_PIX_FMT_MPEG2_SLICE,
1060           320, 240, 8))
1061     return;
1062   src_caps = gst_v4l2_decoder_enum_src_formats (decoder);
1063 
1064   if (gst_caps_is_empty (src_caps)) {
1065     GST_WARNING ("Not registering MPEG2 decoder since it produces no "
1066         "supported format");
1067     goto done;
1068   }
1069 
1070   gst_v4l2_decoder_register (plugin, GST_TYPE_V4L2_CODEC_MPEG2_DEC,
1071       (GClassInitFunc) gst_v4l2_codec_mpeg2_dec_subclass_init,
1072       gst_mini_object_ref (GST_MINI_OBJECT (device)),
1073       (GInstanceInitFunc) gst_v4l2_codec_mpeg2_dec_subinit,
1074       "v4l2sl%smpeg2dec", device, rank, NULL);
1075 
1076 done:
1077   gst_caps_unref (src_caps);
1078 }
1079