1 /*
2 * GStreamer
3 * Copyright (C) 2018 Edward Hervey <edward@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21 /**
22 * SECTION:element-line21decoder
23 * @title: line21decoder
24 *
25 */
26
27 #ifdef HAVE_CONFIG_H
28 # include <config.h>
29 #endif
30
31 #include <gst/gst.h>
32 #include <gst/video/video.h>
33 #include <string.h>
34
35 #include "gstline21dec.h"
36
GST_DEBUG_CATEGORY_STATIC (gst_line_21_decoder_debug);
#define GST_CAT_DEFAULT gst_line_21_decoder_debug

/* Only these 8-bit 4:2:x YUV packings (plus v210, which is converted
 * internally) are accepted; fields must be interleaved so that line 21 of
 * each field is present in the frame */
#define CAPS "video/x-raw, format={ I420, YUY2, YVYU, UYVY, VYUY, v210 }, interlace-mode=interleaved"

static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CAPS));

static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CAPS));

G_DEFINE_TYPE (GstLine21Decoder, gst_line_21_decoder, GST_TYPE_VIDEO_FILTER);
#define parent_class gst_line_21_decoder_parent_class

static void gst_line_21_decoder_finalize (GObject * self);
static gboolean gst_line_21_decoder_stop (GstBaseTransform * btrans);
static gboolean gst_line_21_decoder_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info,
    GstCaps * outcaps, GstVideoInfo * out_info);
static GstFlowReturn gst_line_21_decoder_transform_ip (GstVideoFilter * filter,
    GstVideoFrame * frame);
static GstFlowReturn gst_line_21_decoder_prepare_output_buffer (GstBaseTransform
    * trans, GstBuffer * in, GstBuffer ** out);
64
65 static void
gst_line_21_decoder_class_init(GstLine21DecoderClass * klass)66 gst_line_21_decoder_class_init (GstLine21DecoderClass * klass)
67 {
68 GObjectClass *gobject_class;
69 GstElementClass *gstelement_class;
70 GstBaseTransformClass *transform_class;
71 GstVideoFilterClass *filter_class;
72
73 gobject_class = (GObjectClass *) klass;
74 gstelement_class = (GstElementClass *) klass;
75 transform_class = (GstBaseTransformClass *) klass;
76 filter_class = (GstVideoFilterClass *) klass;
77
78 gobject_class->finalize = gst_line_21_decoder_finalize;
79
80 gst_element_class_set_static_metadata (gstelement_class,
81 "Line 21 CC Decoder",
82 "Filter/Video/ClosedCaption",
83 "Extract line21 CC from SD video streams",
84 "Edward Hervey <edward@centricular.com>");
85
86 gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
87 gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
88
89 transform_class->stop = gst_line_21_decoder_stop;
90 transform_class->prepare_output_buffer =
91 gst_line_21_decoder_prepare_output_buffer;
92
93 filter_class->set_info = gst_line_21_decoder_set_info;
94 filter_class->transform_frame_ip = gst_line_21_decoder_transform_ip;
95
96 GST_DEBUG_CATEGORY_INIT (gst_line_21_decoder_debug, "line21decoder",
97 0, "Line 21 CC Decoder");
98 vbi_initialize_gst_debug ();
99 }
100
101 static void
gst_line_21_decoder_init(GstLine21Decoder * filter)102 gst_line_21_decoder_init (GstLine21Decoder * filter)
103 {
104 GstLine21Decoder *self = (GstLine21Decoder *) filter;
105
106 self->line21_offset = -1;
107 self->max_line_probes = 40;
108 }
109
110 static vbi_pixfmt
vbi_pixfmt_from_gst_video_format(GstVideoFormat format,gboolean * convert_v210)111 vbi_pixfmt_from_gst_video_format (GstVideoFormat format,
112 gboolean * convert_v210)
113 {
114 *convert_v210 = FALSE;
115
116 switch (format) {
117 case GST_VIDEO_FORMAT_I420:
118 return VBI_PIXFMT_YUV420;
119 case GST_VIDEO_FORMAT_YUY2:
120 return VBI_PIXFMT_YUYV;
121 case GST_VIDEO_FORMAT_YVYU:
122 return VBI_PIXFMT_YVYU;
123 case GST_VIDEO_FORMAT_UYVY:
124 return VBI_PIXFMT_UYVY;
125 case GST_VIDEO_FORMAT_VYUY:
126 return VBI_PIXFMT_VYUY;
127 /* for v210 we'll convert it to I420 luma */
128 case GST_VIDEO_FORMAT_v210:
129 *convert_v210 = TRUE;
130 return VBI_PIXFMT_YUV420;
131 /* All the other formats are not really bullet-proof. Force conversion */
132 default:
133 g_assert_not_reached ();
134 return (vbi_pixfmt) 0;
135 }
136 #undef NATIVE_VBI_FMT
137 }
138
/* GstVideoFilter::set_info vfunc: (re)configure the element for new caps.
 *
 * Decides whether the negotiated format/size can carry line 21 data
 * (self->compatible_format), allocates the conversion scratch buffer for
 * v210 input, and (re)initializes the zvbi raw decoder accordingly.
 * Always returns TRUE: incompatible caps simply disable scanning rather
 * than failing negotiation. */
static gboolean
gst_line_21_decoder_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info,
    GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstLine21Decoder *self = (GstLine21Decoder *) filter;
  vbi_pixfmt fmt =
      vbi_pixfmt_from_gst_video_format (GST_VIDEO_INFO_FORMAT (in_info),
      &self->convert_v210);

  GST_DEBUG_OBJECT (filter, "caps %" GST_PTR_FORMAT, incaps);
  GST_DEBUG_OBJECT (filter, "plane_stride:%u , comp_stride:%u , pstride:%u",
      GST_VIDEO_INFO_PLANE_STRIDE (in_info, 0),
      GST_VIDEO_INFO_COMP_STRIDE (in_info, 0),
      GST_VIDEO_INFO_COMP_PSTRIDE (in_info, 0));
  GST_DEBUG_OBJECT (filter, "#planes : %d #components : %d",
      GST_VIDEO_INFO_N_PLANES (in_info), GST_VIDEO_INFO_N_COMPONENTS (in_info));

  /* Drop any state from a previous configuration */
  if (self->info) {
    gst_video_info_free (self->info);
    self->info = NULL;
  }

  g_free (self->converted_lines);
  self->converted_lines = NULL;

  /* Scan the next frame from the first line */
  self->line21_offset = -1;

  /* Line 21 CC is defined for 720-pixel-wide SD content only */
  if (GST_VIDEO_INFO_WIDTH (in_info) != 720) {
    GST_DEBUG_OBJECT (filter, "Only 720 pixel wide formats are supported");
    self->compatible_format = FALSE;
    return TRUE;
  }

  /* NOTE(review): with the current vbi_pixfmt_from_gst_video_format()
   * (which asserts on unknown formats and returns non-zero for v210)
   * this branch looks unreachable in practice — confirm before relying
   * on the conversion path it sets up */
  if (fmt == 0) {
    if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_FORMAT_v210) {
      GST_DEBUG_OBJECT (filter,
          "Format not supported natively, Adding conversion to YUY2");
      self->compatible_format = TRUE;
      self->convert_v210 = TRUE;
    } else {
      GST_DEBUG_OBJECT (filter, "Unsupported format");
      self->compatible_format = FALSE;
    }
    return TRUE;
  }

  if (GST_VIDEO_INFO_WIDTH (in_info) == 720
      && GST_VIDEO_INFO_HEIGHT (in_info) >= 200) {
    GST_DEBUG_OBJECT (filter, "Compatible size!");
    GST_DEBUG_OBJECT (filter,
        "Compatible format plane_stride:%u , comp_stride:%u , pstride:%u",
        GST_VIDEO_INFO_PLANE_STRIDE (in_info, 0),
        GST_VIDEO_INFO_COMP_STRIDE (in_info, 0),
        GST_VIDEO_INFO_COMP_PSTRIDE (in_info, 0));
    self->compatible_format = TRUE;
    if (self->convert_v210) {
      /* self->info describes the I420 layout the v210 data is converted
       * to; zvbi is then configured against that layout, not the input */
      self->info = gst_video_info_new ();
      gst_video_info_set_format (self->info, GST_VIDEO_FORMAT_I420,
          GST_VIDEO_INFO_WIDTH (in_info), GST_VIDEO_INFO_HEIGHT (in_info));
      /* Allocate space for two *I420* Y lines (with stride) */
      self->converted_lines =
          g_malloc0 (2 * GST_VIDEO_INFO_COMP_STRIDE (self->info, 0));
    } else
      self->info = gst_video_info_copy (in_info);

    /* initialize the decoder */
    if (self->zvbi_decoder.pattern != NULL)
      vbi_raw_decoder_reset (&self->zvbi_decoder);
    else
      vbi_raw_decoder_init (&self->zvbi_decoder);
    /*
     * Set up blank / black / white levels fit for NTSC, no actual relation
     * with the height of the video
     */
    self->zvbi_decoder.scanning = 525;
    /* The pixel format. Quite a few formats are handled by zvbi, but
     * some are not and require conversion (or cheating) */
    self->zvbi_decoder.sampling_format = fmt;
    /* Sampling rate. For BT.601 it's 13.5MHz */
    self->zvbi_decoder.sampling_rate = 13.5e6;  /* Hz (i.e. BT.601) */
    /* Stride */
    self->zvbi_decoder.bytes_per_line =
        GST_VIDEO_INFO_COMP_STRIDE (self->info, 0);
    /* Sampling starts 9.7 µs from the front edge of the
       hor. sync pulse. You may have to adjust this.
       NOTE : This is actually ignored in the code ...
     */
    self->zvbi_decoder.offset = 9.7e-6 * 13.5e6;

    /* The following values indicate what we are feeding to zvbi.
     * By setting start[0] = 21, we are telling zvbi that the very
     * beginning of the data we are feeding to it corresponds to
     * line 21 (which is where CC1/CC3 is located).
     *
     * Then by specifying count[0] = 1, we are telling it to only
     * scan 1 line from the beginning of the data.
     *
     * It is more efficient and flexible to do it this way, since
     * we can then control what we are feeding it (i.e. *we* will
     * figure out the offset to line 21, which might or might not
     * be the beginning of the buffer data, and feed data from
     * there). This would also allows us to have a "scanning" mode
     * where we repeatedly provide it with pairs of lines until it
     * finds something. */
    self->zvbi_decoder.start[0] = 21;
    self->zvbi_decoder.count[0] = 1;

    /* Second field. */
    self->zvbi_decoder.start[1] = 284;
    self->zvbi_decoder.count[1] = 1;

    /* FIXME : Adjust according to the info.interlace_mode ! */
    self->zvbi_decoder.interlaced = TRUE;

    /* synchronous is essentially top-field-first.
     * WARNING : zvbi doesn't support bottom-field-first. */
    self->zvbi_decoder.synchronous = TRUE;

    /* Specify the services you want. Adjust based on whether we
     * have PAL or NTSC */
    vbi_raw_decoder_add_services (&self->zvbi_decoder,
        VBI_SLICED_CAPTION_525, /* strict */ 0);

  } else
    self->compatible_format = FALSE;

  return TRUE;
}
269
270 static GstFlowReturn
gst_line_21_decoder_prepare_output_buffer(GstBaseTransform * trans,GstBuffer * in,GstBuffer ** out)271 gst_line_21_decoder_prepare_output_buffer (GstBaseTransform * trans,
272 GstBuffer * in, GstBuffer ** out)
273 {
274 GstLine21Decoder *self = (GstLine21Decoder *) trans;
275
276 GST_DEBUG_OBJECT (trans, "compatible_format:%d", self->compatible_format);
277 if (self->compatible_format) {
278 /* Make the output buffer writable */
279 *out = gst_buffer_make_writable (in);
280 return GST_FLOW_OK;
281 }
282
283 return
284 GST_BASE_TRANSFORM_CLASS
285 (gst_line_21_decoder_parent_class)->prepare_output_buffer (trans, in,
286 out);
287 }
288
289 static void
convert_line_v210_luma(const guint8 * orig,guint8 * dest,guint width)290 convert_line_v210_luma (const guint8 * orig, guint8 * dest, guint width)
291 {
292 guint i;
293 guint32 a, b, c, d;
294 guint8 *y = dest;
295
296 for (i = 0; i < width - 5; i += 6) {
297 a = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 0);
298 b = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 4);
299 c = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 8);
300 d = GST_READ_UINT32_LE (orig + (i / 6) * 16 + 12);
301
302 *y++ = (a >> 12) & 0xff;
303 *y++ = (b >> 2) & 0xff;
304
305 *y++ = (b >> 22) & 0xff;
306 *y++ = (c >> 12) & 0xff;
307
308 *y++ = (d >> 2) & 0xff;
309 *y++ = (d >> 22) & 0xff;
310 }
311 }
312
/* Return a pointer to two consecutive lines of 8-bit luma starting at
 * @line, suitable for feeding to vbi_raw_decode().
 *
 * For natively supported formats this points straight into the frame
 * data; for v210 the two lines are converted into self->converted_lines
 * (allocated in set_info() as 2 I420 Y lines) and that scratch buffer is
 * returned instead.
 *
 * NOTE(review): the native path computes the offset with the stride of
 * self->info while the v210 path uses the frame's own stride — these
 * should agree for the native case since self->info is a copy of the
 * input info, but confirm if strides can differ from the caps. */
static guint8 *
get_video_data (GstLine21Decoder * self, GstVideoFrame * frame, gint line)
{
  guint8 *data = self->converted_lines;
  guint8 *v210;

  if (!self->convert_v210)
    return (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (frame,
        0) + line * GST_VIDEO_INFO_COMP_STRIDE (self->info, 0);

  v210 = (guint8 *)
      GST_VIDEO_FRAME_PLANE_DATA (frame,
      0) + line * GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* Convert v210 to I420 */
  convert_line_v210_luma (v210, data, GST_VIDEO_FRAME_WIDTH (frame));
  v210 += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  convert_line_v210_luma (v210, data + GST_VIDEO_INFO_COMP_STRIDE (self->info,
          0), GST_VIDEO_FRAME_WIDTH (frame));
  GST_MEMDUMP ("converted", self->converted_lines, 64);
  return self->converted_lines;
}
335
/* Call this to scan for CC
 * Returns TRUE if it was found and set, else FALSE
 *
 * Probes up to self->max_line_probes line offsets of @frame with zvbi,
 * starting at the offset that succeeded on the previous frame (or 0).
 * On success the offset is cached in self->line21_offset and a
 * CEA-608 S334-1A caption meta is attached to the frame's buffer. */
static gboolean
gst_line_21_decoder_scan (GstLine21Decoder * self, GstVideoFrame * frame)
{
  gint i;
  vbi_sliced sliced[52];
  gboolean found = FALSE;
  guint8 *data;

  GST_DEBUG_OBJECT (self, "Starting probing. max_line_probes:%d",
      self->max_line_probes);

  /* Resume at the offset that worked last time, if any */
  i = self->line21_offset;
  if (i == -1) {
    GST_DEBUG_OBJECT (self, "Scanning from the beginning");
    i = 0;
  }

  for (; i < self->max_line_probes && i < GST_VIDEO_FRAME_HEIGHT (frame); i++) {
    gint n_lines;
    data = get_video_data (self, frame, i);
    /* Scan until we get n_lines == 2 */
    n_lines = vbi_raw_decode (&self->zvbi_decoder, data, sliced);
    GST_DEBUG_OBJECT (self, "i:%d n_lines:%d", i, n_lines);
    if (n_lines == 2) {
      GST_DEBUG_OBJECT (self, "Found 2 CC lines at offset %d", i);
      self->line21_offset = i;
      found = TRUE;
      break;
    } else if (i == self->line21_offset) {
      /* Otherwise if this was the previously probed line offset,
       * reset and start searching again from the beginning */
      i = -1;                   /* loop increment brings us back to 0 */
      self->line21_offset = -1;
    }
  }

  if (!found) {
    GST_DEBUG_OBJECT (self, "No CC found");
    self->line21_offset = -1;
  } else {
    guint base_line1 = 0, base_line2 = 0;
    guint8 ccdata[6] = { 0x80, 0x80, 0x80, 0x00, 0x80, 0x80 };  /* Initialize the ccdata */

    /* First visible field line per SMPTE S334-1; 0 if the height matches
     * neither NTSC (525) nor PAL (625) scanning */
    if (GST_VIDEO_FRAME_HEIGHT (frame) == 525) {
      base_line1 = 9;
      base_line2 = 272;
    } else if (GST_VIDEO_FRAME_HEIGHT (frame) == 625) {
      base_line1 = 5;
      base_line2 = 318;
    }

    /* Pack the S334-1A triplets: 5-bit line offset followed by the two
     * data bytes of each field's sliced CC line */
    ccdata[0] |= (base_line1 < i ? i - base_line1 : 0) & 0x1f;
    ccdata[1] = sliced[0].data[0];
    ccdata[2] = sliced[0].data[1];
    ccdata[3] |= (base_line2 < i ? i - base_line2 : 0) & 0x1f;
    ccdata[4] = sliced[1].data[0];
    ccdata[5] = sliced[1].data[1];
    gst_buffer_add_video_caption_meta (frame->buffer,
        GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A, ccdata, 6);
    GST_TRACE_OBJECT (self,
        "Got CC 0x%02x 0x%02x / 0x%02x 0x%02x '%c%c / %c%c'", ccdata[1],
        ccdata[2], ccdata[4], ccdata[5],
        g_ascii_isprint (ccdata[1] & 0x7f) ? ccdata[1] & 0x7f : '.',
        g_ascii_isprint (ccdata[2] & 0x7f) ? ccdata[2] & 0x7f : '.',
        g_ascii_isprint (ccdata[4] & 0x7f) ? ccdata[4] & 0x7f : '.',
        g_ascii_isprint (ccdata[5] & 0x7f) ? ccdata[5] & 0x7f : '.');

  }

  return found;
}
409
410 static GstFlowReturn
gst_line_21_decoder_transform_ip(GstVideoFilter * filter,GstVideoFrame * frame)411 gst_line_21_decoder_transform_ip (GstVideoFilter * filter,
412 GstVideoFrame * frame)
413 {
414 GstLine21Decoder *self = (GstLine21Decoder *) filter;
415
416 if (!self->compatible_format)
417 return GST_FLOW_OK;
418
419 gst_line_21_decoder_scan (self, frame);
420 return GST_FLOW_OK;
421 }
422
423 static gboolean
gst_line_21_decoder_stop(GstBaseTransform * btrans)424 gst_line_21_decoder_stop (GstBaseTransform * btrans)
425 {
426 GstLine21Decoder *self = (GstLine21Decoder *) btrans;
427
428 vbi_raw_decoder_destroy (&self->zvbi_decoder);
429
430 return TRUE;
431 }
432
433 static void
gst_line_21_decoder_finalize(GObject * object)434 gst_line_21_decoder_finalize (GObject * object)
435 {
436 GstLine21Decoder *self = (GstLine21Decoder *) object;
437
438 if (self->info) {
439 gst_video_info_free (self->info);
440 self->info = NULL;
441 }
442 g_free (self->converted_lines);
443 self->converted_lines = NULL;
444
445 G_OBJECT_CLASS (parent_class)->finalize (object);
446 }
447