/*
 * GStreamer
 * Copyright (C) 2018 Edward Hervey <edward@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-line21decoder
 * @title: line21decoder
 *
 * Extracts line 21 closed captions (CEA-608) from SD interlaced video
 * streams and attaches them to buffers as #GstVideoCaptionMeta.
 *
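 * ## Example launch line
 *
 * An illustrative pipeline sketch only (the file name is a placeholder and
 * the decoded stream must already be interlaced, 720 pixels wide and in one
 * of the supported raw formats):
 *
 * |[
 * gst-launch-1.0 filesrc location=ntsc-clip.mov ! decodebin ! line21decoder ! videoconvert ! autovideosink
 * ]|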
 */

#ifdef HAVE_CONFIG_H
# include <config.h>
#endif

#include <gst/gst.h>
#include <gst/video/video.h>
#include <string.h>

#include "gstline21dec.h"

GST_DEBUG_CATEGORY_STATIC (gst_line_21_decoder_debug);
#define GST_CAT_DEFAULT gst_line_21_decoder_debug

/**
 * GstLine21DecoderMode:
 * @GST_LINE_21_DECODER_MODE_ADD: add new CC meta on top of other CC meta, if any
 * @GST_LINE_21_DECODER_MODE_DROP: ignore CC if a CC meta was already present
 * @GST_LINE_21_DECODER_MODE_REPLACE: replace existing CC meta
 *
 * Since: 1.20
 */

enum
{
  PROP_0,
  PROP_NTSC_ONLY,
  PROP_MODE,
};

#define DEFAULT_NTSC_ONLY FALSE
#define DEFAULT_MODE GST_LINE_21_DECODER_MODE_ADD

#define CAPS "video/x-raw, format={ I420, YUY2, YVYU, UYVY, VYUY, v210 }"

static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CAPS));

static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CAPS));

#define parent_class gst_line_21_decoder_parent_class
G_DEFINE_TYPE (GstLine21Decoder, gst_line_21_decoder, GST_TYPE_VIDEO_FILTER);
GST_ELEMENT_REGISTER_DEFINE (line21decoder, "line21decoder",
    GST_RANK_NONE, GST_TYPE_LINE21DECODER);

#define GST_TYPE_LINE_21_DECODER_MODE (gst_line_21_decoder_mode_get_type())
static GType
gst_line_21_decoder_mode_get_type (void)
{
  static const GEnumValue values[] = {
    {GST_LINE_21_DECODER_MODE_ADD,
        "add new CC meta on top of other CC meta, if any", "add"},
    {GST_LINE_21_DECODER_MODE_DROP,
        "ignore CC if a CC meta was already present",
        "drop"},
    {GST_LINE_21_DECODER_MODE_REPLACE,
        "replace existing CC meta", "replace"},
    {0, NULL, NULL}
  };
  static volatile GType id = 0;

  if (g_once_init_enter ((gsize *) & id)) {
    GType _id;

    _id = g_enum_register_static ("GstLine21DecoderMode", values);

    g_once_init_leave ((gsize *) & id, _id);
  }

  return id;
}

static void gst_line_21_decoder_finalize (GObject * self);
static gboolean gst_line_21_decoder_stop (GstBaseTransform * btrans);
static gboolean gst_line_21_decoder_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info,
    GstCaps * outcaps, GstVideoInfo * out_info);
static GstFlowReturn gst_line_21_decoder_transform_ip (GstVideoFilter * filter,
    GstVideoFrame * frame);
static GstFlowReturn gst_line_21_decoder_prepare_output_buffer (GstBaseTransform
    * trans, GstBuffer * in, GstBuffer ** out);

static void
gst_line_21_decoder_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstLine21Decoder *enc = GST_LINE21DECODER (object);

  switch (prop_id) {
    case PROP_MODE:
      enc->mode = g_value_get_enum (value);
      break;
    case PROP_NTSC_ONLY:
      enc->ntsc_only = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_line_21_decoder_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstLine21Decoder *enc = GST_LINE21DECODER (object);

  switch (prop_id) {
    case PROP_MODE:
      g_value_set_enum (value, enc->mode);
      break;
    case PROP_NTSC_ONLY:
      g_value_set_boolean (value, enc->ntsc_only);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_line_21_decoder_class_init (GstLine21DecoderClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstBaseTransformClass *transform_class;
  GstVideoFilterClass *filter_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  transform_class = (GstBaseTransformClass *) klass;
  filter_class = (GstVideoFilterClass *) klass;

  gobject_class->finalize = gst_line_21_decoder_finalize;
  gobject_class->set_property = gst_line_21_decoder_set_property;
  gobject_class->get_property = gst_line_21_decoder_get_property;

  /**
   * line21decoder:ntsc-only
   *
   * Whether line 21 decoding should only be attempted when the
   * input resolution matches NTSC (720 x 525) or NTSC usable
   * lines (720 x 486)
   *
   * Since: 1.20
   */
  g_object_class_install_property (G_OBJECT_CLASS (klass),
      PROP_NTSC_ONLY, g_param_spec_boolean ("ntsc-only",
          "NTSC only",
          "Whether line 21 decoding should only be attempted when the "
          "input resolution matches NTSC", DEFAULT_NTSC_ONLY,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstLine21Decoder:mode
   *
   * Control whether and how detected CC meta should be inserted
   * in the list of existing CC meta on a frame (if any).
   *
   * Since: 1.20
   */
  g_object_class_install_property (G_OBJECT_CLASS (klass),
      PROP_MODE, g_param_spec_enum ("mode",
          "Mode",
          "Control whether and how detected CC meta should be inserted "
          "in the list of existing CC meta on a frame (if any).",
          GST_TYPE_LINE_21_DECODER_MODE, DEFAULT_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_set_static_metadata (gstelement_class,
      "Line 21 CC Decoder",
      "Filter/Video/ClosedCaption",
      "Extract line21 CC from SD video streams",
      "Edward Hervey <edward@centricular.com>");

  gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
  gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);

  transform_class->stop = gst_line_21_decoder_stop;
  transform_class->prepare_output_buffer =
      gst_line_21_decoder_prepare_output_buffer;

  filter_class->set_info = gst_line_21_decoder_set_info;
  filter_class->transform_frame_ip = gst_line_21_decoder_transform_ip;

  GST_DEBUG_CATEGORY_INIT (gst_line_21_decoder_debug, "line21decoder",
      0, "Line 21 CC Decoder");
  vbi_initialize_gst_debug ();

  gst_type_mark_as_plugin_api (GST_TYPE_LINE_21_DECODER_MODE, 0);
}

static void
gst_line_21_decoder_init (GstLine21Decoder * filter)
{
  GstLine21Decoder *self = (GstLine21Decoder *) filter;

  self->info = NULL;
  self->line21_offset = -1;
  self->max_line_probes = 40;
  self->ntsc_only = DEFAULT_NTSC_ONLY;
  self->mode = DEFAULT_MODE;
}

static vbi_pixfmt
vbi_pixfmt_from_gst_video_format (GstVideoFormat format,
    gboolean * convert_v210)
{
  *convert_v210 = FALSE;

  switch (format) {
    case GST_VIDEO_FORMAT_I420:
      return VBI_PIXFMT_YUV420;
    case GST_VIDEO_FORMAT_YUY2:
      return VBI_PIXFMT_YUYV;
    case GST_VIDEO_FORMAT_YVYU:
      return VBI_PIXFMT_YVYU;
    case GST_VIDEO_FORMAT_UYVY:
      return VBI_PIXFMT_UYVY;
    case GST_VIDEO_FORMAT_VYUY:
      return VBI_PIXFMT_VYUY;
      /* for v210 we'll convert it to I420 luma */
    case GST_VIDEO_FORMAT_v210:
      *convert_v210 = TRUE;
      return VBI_PIXFMT_YUV420;
      /* All the other formats are not really bullet-proof. Force conversion */
    default:
      g_assert_not_reached ();
      return (vbi_pixfmt) 0;
  }
#undef NATIVE_VBI_FMT
}

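/* Called when the input caps change: check whether the incoming video is
 * something we can scan for line 21 data (interlaced and 720 pixels wide,
 * optionally restricted to NTSC heights) and, if so, (re)configure the zvbi
 * raw decoder. Incompatible input is not an error: buffers are then simply
 * passed through untouched. */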
static gboolean
gst_line_21_decoder_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info,
    GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstLine21Decoder *self = (GstLine21Decoder *) filter;
  vbi_pixfmt fmt =
      vbi_pixfmt_from_gst_video_format (GST_VIDEO_INFO_FORMAT (in_info),
      &self->convert_v210);

  GST_DEBUG_OBJECT (filter, "caps %" GST_PTR_FORMAT, incaps);
  GST_DEBUG_OBJECT (filter, "plane_stride:%u , comp_stride:%u , pstride:%u",
      GST_VIDEO_INFO_PLANE_STRIDE (in_info, 0),
      GST_VIDEO_INFO_COMP_STRIDE (in_info, 0),
      GST_VIDEO_INFO_COMP_PSTRIDE (in_info, 0));
  GST_DEBUG_OBJECT (filter, "#planes : %d #components : %d",
      GST_VIDEO_INFO_N_PLANES (in_info), GST_VIDEO_INFO_N_COMPONENTS (in_info));

  if (self->info) {
    gst_video_info_free (self->info);
    self->info = NULL;
  }

  g_free (self->converted_lines);
  self->converted_lines = NULL;

  /* Scan the next frame from the first line */
  self->line21_offset = -1;

  if (!GST_VIDEO_INFO_IS_INTERLACED (in_info)) {
    GST_DEBUG_OBJECT (filter, "Only interlaced formats are supported");
    self->compatible_format = FALSE;
    return TRUE;
  }

  if (GST_VIDEO_INFO_WIDTH (in_info) != 720) {
    GST_DEBUG_OBJECT (filter, "Only 720 pixel wide formats are supported");
    self->compatible_format = FALSE;
    return TRUE;
  }

  if (self->ntsc_only &&
      GST_VIDEO_INFO_HEIGHT (in_info) != 525 &&
      GST_VIDEO_INFO_HEIGHT (in_info) != 486) {
    GST_DEBUG_OBJECT (filter,
        "NTSC-only, only 525 or 486 pixel high formats are supported");
    self->compatible_format = FALSE;
    return TRUE;
  }

  if (fmt == 0) {
    if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_FORMAT_v210) {
      GST_DEBUG_OBJECT (filter,
          "Format not supported natively, Adding conversion to YUY2");
      self->compatible_format = TRUE;
      self->convert_v210 = TRUE;
    } else {
      GST_DEBUG_OBJECT (filter, "Unsupported format");
      self->compatible_format = FALSE;
    }
    return TRUE;
  }

  if (GST_VIDEO_INFO_WIDTH (in_info) == 720
      && GST_VIDEO_INFO_HEIGHT (in_info) >= 200) {
    GST_DEBUG_OBJECT (filter, "Compatible size!");
    GST_DEBUG_OBJECT (filter,
        "Compatible format plane_stride:%u , comp_stride:%u , pstride:%u",
        GST_VIDEO_INFO_PLANE_STRIDE (in_info, 0),
        GST_VIDEO_INFO_COMP_STRIDE (in_info, 0),
        GST_VIDEO_INFO_COMP_PSTRIDE (in_info, 0));
    self->compatible_format = TRUE;
    if (self->convert_v210) {
      self->info = gst_video_info_new ();
      gst_video_info_set_format (self->info, GST_VIDEO_FORMAT_I420,
          GST_VIDEO_INFO_WIDTH (in_info), GST_VIDEO_INFO_HEIGHT (in_info));
      /* Allocate space for two *I420* Y lines (with stride) */
      self->converted_lines =
          g_malloc0 (2 * GST_VIDEO_INFO_COMP_STRIDE (self->info, 0));
    } else
      self->info = gst_video_info_copy (in_info);

    /* initialize the decoder */
    if (self->zvbi_decoder.pattern != NULL)
      vbi_raw_decoder_reset (&self->zvbi_decoder);
    else
      vbi_raw_decoder_init (&self->zvbi_decoder);
    /*
     * Set up blank / black / white levels fit for NTSC, no actual relation
     * with the height of the video
     */
    self->zvbi_decoder.scanning = 525;
    /* The pixel format. Quite a few formats are handled by zvbi, but
     * some are not and require conversion (or cheating) */
    self->zvbi_decoder.sampling_format = fmt;
    /* Sampling rate. For BT.601 it's 13.5MHz */
    self->zvbi_decoder.sampling_rate = 13.5e6;  /* Hz (i.e. BT.601) */
    /* Stride */
    self->zvbi_decoder.bytes_per_line =
        GST_VIDEO_INFO_COMP_STRIDE (self->info, 0);
    /* Sampling starts 9.7 µs from the front edge of the
       hor. sync pulse. You may have to adjust this.
       NOTE : This is actually ignored in the code ...
     */
    self->zvbi_decoder.offset = 9.7e-6 * 13.5e6;

    /* The following values indicate what we are feeding to zvbi.
     * By setting start[0] = 21, we are telling zvbi that the very
     * beginning of the data we are feeding to it corresponds to
     * line 21 (which is where CC1/CC3 is located).
     *
     * Then by specifying count[0] = 1, we are telling it to only
     * scan 1 line from the beginning of the data.
     *
     * It is more efficient and flexible to do it this way, since
     * we can then control what we are feeding it (i.e. *we* will
     * figure out the offset to line 21, which might or might not
     * be the beginning of the buffer data, and feed data from
     * there). This also allows us to have a "scanning" mode
     * where we repeatedly provide it with pairs of lines until it
     * finds something. */
    self->zvbi_decoder.start[0] = 21;
    self->zvbi_decoder.count[0] = 1;

    /* Second field. */
    self->zvbi_decoder.start[1] = 284;
    self->zvbi_decoder.count[1] = 1;

    /* FIXME : Adjust according to the info.interlace_mode ! */
    self->zvbi_decoder.interlaced = TRUE;

    /* synchronous is essentially top-field-first.
     * WARNING : zvbi doesn't support bottom-field-first. */
    self->zvbi_decoder.synchronous = TRUE;

    /* Specify the services you want. Adjust based on whether we
     * have PAL or NTSC */
    vbi_raw_decoder_add_services (&self->zvbi_decoder,
        VBI_SLICED_CAPTION_525, /* strict */ 0);

  } else
    self->compatible_format = FALSE;

  return TRUE;
}

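/* For compatible input we only attach metas, so a writable version of the
 * input buffer is all that is needed and the frame can be modified in place.
 * Anything else is deferred to the base class implementation. */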
static GstFlowReturn
gst_line_21_decoder_prepare_output_buffer (GstBaseTransform * trans,
    GstBuffer * in, GstBuffer ** out)
{
  GstLine21Decoder *self = (GstLine21Decoder *) trans;

  GST_DEBUG_OBJECT (trans, "compatible_format:%d", self->compatible_format);
  if (self->compatible_format) {
    /* Make the output buffer writable */
    *out = gst_buffer_make_writable (in);
    return GST_FLOW_OK;
  }

  return
      GST_BASE_TRANSFORM_CLASS
      (gst_line_21_decoder_parent_class)->prepare_output_buffer (trans, in,
      out);
}

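/* v210 packs a group of 6 pixels into four 32-bit little-endian words
 * (16 bytes), each component being 10 bits wide. Extract only the luma
 * samples, keeping their 8 most significant bits, which matches the 8-bit
 * YUV420 layout we advertise to zvbi. */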
static void
convert_line_v210_luma (const guint8 * orig, guint8 * dest, guint width)
{
  guint i;
  guint32 a, b, c, d;
  guint8 *y = dest;

  for (i = 0; i < width - 5; i += 6) {
    a = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 0);
    b = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 4);
    c = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 8);
    d = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 12);

    *y++ = (a >> 12) & 0xff;
    *y++ = (b >> 2) & 0xff;

    *y++ = (b >> 22) & 0xff;
    *y++ = (c >> 12) & 0xff;

    *y++ = (d >> 2) & 0xff;
    *y++ = (d >> 22) & 0xff;
  }
}

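/* Return a pointer to the luma data of the given line. For formats zvbi
 * handles natively this points straight into the frame; for v210 this line
 * and the following one (the decoder reads one line per field) are first
 * converted into the pre-allocated 8-bit scratch buffer. */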
static guint8 *
get_video_data (GstLine21Decoder * self, GstVideoFrame * frame, gint line)
{
  guint8 *data = self->converted_lines;
  guint8 *v210;

  if (!self->convert_v210)
    return (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (frame,
        0) + line * GST_VIDEO_INFO_COMP_STRIDE (self->info, 0);

  v210 = (guint8 *)
      GST_VIDEO_FRAME_PLANE_DATA (frame,
      0) + line * GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* Convert v210 to I420 */
  convert_line_v210_luma (v210, data, GST_VIDEO_FRAME_WIDTH (frame));
  v210 += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  convert_line_v210_luma (v210, data + GST_VIDEO_INFO_COMP_STRIDE (self->info,
          0), GST_VIDEO_FRAME_WIDTH (frame));
  GST_MEMDUMP ("converted", self->converted_lines, 64);
  return self->converted_lines;
}

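/* GstBufferForeachMetaFunc: setting *meta to NULL tells
 * gst_buffer_foreach_meta() to remove that meta, so this strips any existing
 * closed caption meta from the buffer. */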
static gboolean
drop_cc_meta (GstBuffer * buffer, GstMeta ** meta, gpointer unused)
{
  if ((*meta)->info->api == GST_VIDEO_CAPTION_META_API_TYPE)
    *meta = NULL;

  return TRUE;
}

/* Call this to scan for CC
 * Returns TRUE if it was found and set, else FALSE */
static gboolean
gst_line_21_decoder_scan (GstLine21Decoder * self, GstVideoFrame * frame)
{
  gint i;
  vbi_sliced sliced[52];
  gboolean found = FALSE;
  guint8 *data;

  if (self->mode == GST_LINE_21_DECODER_MODE_DROP &&
      gst_buffer_get_n_meta (frame->buffer,
          GST_VIDEO_CAPTION_META_API_TYPE) > 0) {
    GST_DEBUG_OBJECT (self, "Mode drop and buffer had CC meta, ignoring");
    return FALSE;
  }

  GST_DEBUG_OBJECT (self, "Starting probing. max_line_probes:%d",
      self->max_line_probes);

  i = self->line21_offset;
  if (i == -1) {
    GST_DEBUG_OBJECT (self, "Scanning from the beginning");
    i = 0;
  }

  for (; i < self->max_line_probes && i < GST_VIDEO_FRAME_HEIGHT (frame); i++) {
    gint n_lines;
    data = get_video_data (self, frame, i);
    /* Scan until we get n_lines == 2 */
    n_lines = vbi_raw_decode (&self->zvbi_decoder, data, sliced);
    GST_DEBUG_OBJECT (self, "i:%d n_lines:%d", i, n_lines);
    if (n_lines == 2) {
      GST_DEBUG_OBJECT (self, "Found 2 CC lines at offset %d", i);
      self->line21_offset = i;
      found = TRUE;
      break;
    } else if (i == self->line21_offset) {
      /* Otherwise if this was the previously probed line offset,
       * reset and start searching again from the beginning */
      i = -1;
      self->line21_offset = -1;
    }
  }

  if (!found) {
    self->line21_offset = -1;
  } else {
    guint base_line1 = 0, base_line2 = 0;
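    /* Closed captions packed as CEA-608 S334-1A: one 3-byte triplet per
     * field, where the first byte carries the field indicator (0x80 for the
     * first field, 0x00 for the second) and the line offset in its low 5
     * bits, followed by the two CC payload bytes (0x80 0x80 meaning "no
     * data"). */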
    guint8 ccdata[6] = { 0x80, 0x80, 0x80, 0x00, 0x80, 0x80 };

    if (GST_VIDEO_FRAME_HEIGHT (frame) == 525) {
      base_line1 = 9;
      base_line2 = 272;
    } else if (GST_VIDEO_FRAME_HEIGHT (frame) == 625) {
      base_line1 = 5;
      base_line2 = 318;
    }

    if (self->mode == GST_LINE_21_DECODER_MODE_REPLACE) {
      GST_DEBUG_OBJECT (self,
          "Mode replace and new CC meta, removing existing CC meta");
      gst_buffer_foreach_meta (frame->buffer, drop_cc_meta, NULL);
    }

    ccdata[0] |= (base_line1 < i ? i - base_line1 : 0) & 0x1f;
    ccdata[1] = sliced[0].data[0];
    ccdata[2] = sliced[0].data[1];
    ccdata[3] |= (base_line2 < i ? i - base_line2 : 0) & 0x1f;
    ccdata[4] = sliced[1].data[0];
    ccdata[5] = sliced[1].data[1];
    gst_buffer_add_video_caption_meta (frame->buffer,
        GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A, ccdata, 6);
    GST_TRACE_OBJECT (self,
        "Got CC 0x%02x 0x%02x / 0x%02x 0x%02x '%c%c / %c%c'", ccdata[1],
        ccdata[2], ccdata[4], ccdata[5],
        g_ascii_isprint (ccdata[1] & 0x7f) ? ccdata[1] & 0x7f : '.',
        g_ascii_isprint (ccdata[2] & 0x7f) ? ccdata[2] & 0x7f : '.',
        g_ascii_isprint (ccdata[4] & 0x7f) ? ccdata[4] & 0x7f : '.',
        g_ascii_isprint (ccdata[5] & 0x7f) ? ccdata[5] & 0x7f : '.');

  }

  return found;
}


static GstFlowReturn
gst_line_21_decoder_transform_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  GstLine21Decoder *self = (GstLine21Decoder *) filter;

  if (!self->compatible_format)
    return GST_FLOW_OK;

  gst_line_21_decoder_scan (self, frame);
  return GST_FLOW_OK;
}

static gboolean
gst_line_21_decoder_stop (GstBaseTransform * btrans)
{
  GstLine21Decoder *self = (GstLine21Decoder *) btrans;

  vbi_raw_decoder_destroy (&self->zvbi_decoder);
  if (self->info) {
    gst_video_info_free (self->info);
    self->info = NULL;
  }

  return TRUE;
}

static void
gst_line_21_decoder_finalize (GObject * object)
{
  GstLine21Decoder *self = (GstLine21Decoder *) object;

  if (self->info) {
    gst_video_info_free (self->info);
    self->info = NULL;
  }
  g_free (self->converted_lines);
  self->converted_lines = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}