/* GStreamer
 * Copyright (C) 2018 Edward Hervey <edward@centricular.com>
 * Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#  include "config.h"
#endif

#include <string.h>
#include <gst/base/gstbytereader.h>
#include "video-anc.h"

#if !GLIB_CHECK_VERSION(2, 47, 4)
#ifdef __GNUC__
#define G_GNUC_CHECK_VERSION(major, minor) \
    ((__GNUC__ > (major)) || \
     ((__GNUC__ == (major)) && \
      (__GNUC_MINOR__ >= (minor))))
#else
#define G_GNUC_CHECK_VERSION(major, minor) 0
#endif
#endif

/**
 * SECTION:gstvideoanc
 * @title: GstVideo Ancillary
 * @short_description: Utilities for Ancillary data, VBI and Closed Caption
 *
 * A collection of objects and methods to assist with handling Ancillary Data
 * present in Vertical Blanking Interval as well as Closed Caption.
 */
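
/* Example (illustrative only): extracting ancillary packets from one line of
 * v210 video. `line_data` and `width` stand in for the caller's own line
 * buffer and frame width.
 *
 *   GstVideoAncillary anc;
 *   GstVideoVBIParser *parser =
 *       gst_video_vbi_parser_new (GST_VIDEO_FORMAT_v210, width);
 *
 *   gst_video_vbi_parser_add_line (parser, line_data);
 *   while (gst_video_vbi_parser_get_ancillary (parser, &anc) ==
 *       GST_VIDEO_VBI_PARSER_RESULT_OK) {
 *     GST_DEBUG ("Found ANC DID 0x%02x SDID 0x%02x with %u bytes of user data",
 *         anc.DID, anc.SDID_block_number, anc.data_count);
 *   }
 *   gst_video_vbi_parser_free (parser);
 */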

#ifndef GST_DISABLE_GST_DEBUG
#define GST_CAT_DEFAULT ensure_debug_category()
static GstDebugCategory *
ensure_debug_category (void)
{
  static gsize cat_gonce = 0;

  if (g_once_init_enter (&cat_gonce)) {
    gsize cat_done;

    cat_done = (gsize) _gst_debug_category_new ("video-anc", 0,
        "Ancillary data, VBI and CC utilities");

    g_once_init_leave (&cat_gonce, cat_done);
  }

  return (GstDebugCategory *) cat_gonce;
}
#else
#define ensure_debug_category() /* NOOP */
#endif /* GST_DISABLE_GST_DEBUG */

struct _GstVideoVBIParser
{
  GstVideoInfo info;            /* format of the lines provided */
  guint8 *work_data;            /* Converted line in planar 16bit format */
  guint32 work_data_size;       /* Size in bytes of work_data */
  guint offset;                 /* Current offset (in bytes) in work_data */
  gboolean bit16;               /* Data is stored as 16bit if TRUE. Else 8bit(without parity) */
};

G_DEFINE_BOXED_TYPE (GstVideoVBIParser, gst_video_vbi_parser,
    (GBoxedCopyFunc) gst_video_vbi_parser_copy,
    (GBoxedFreeFunc) gst_video_vbi_parser_free);

GstVideoVBIParser *
gst_video_vbi_parser_copy (const GstVideoVBIParser * parser)
{
  GstVideoVBIParser *res;

  g_return_val_if_fail (parser != NULL, NULL);

  res = gst_video_vbi_parser_new (GST_VIDEO_INFO_FORMAT (&parser->info),
      parser->info.width);
  if (res) {
    memcpy (res->work_data, parser->work_data, parser->work_data_size);
  }
  return res;
}

/* See SMPTE S291 */
static GstVideoVBIParserResult
get_ancillary_16 (GstVideoVBIParser * parser, GstVideoAncillary * anc)
{
  gboolean found = FALSE;
  const guint16 *data = (const guint16 *) parser->work_data;

  g_return_val_if_fail (parser != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);
  g_return_val_if_fail (anc != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);

  /* 3 words are needed at least to detect what kind of packet we look at
   *
   * - ADF (SMPTE S291 3.2.1) in case of component ancillary format:
   *       0x000 0x3ff 0x3ff (followed by DID, SDID)
   * - ADF (SMPTE S291 3.2.2) in case of composite ancillary format:
   *       0x3fc DID   SDID
   */
  while (parser->offset + 3 < parser->work_data_size) {
    guint8 DID, SDID, DC;
    guint i = 0, j;
    guint checksum = 0;
    gboolean composite;

    /* Look for ADF */
    if (data[parser->offset] == 0x3fc) {
      /* composite */
      i += 1;
      composite = TRUE;
    } else if (data[parser->offset] == 0x000 &&
        data[parser->offset + 1] == 0x3ff &&
        data[parser->offset + 2] == 0x3ff) {
      /* component */
      i += 3;
      composite = FALSE;
    } else {
      parser->offset += 1;
      continue;
    }

    /* TODO: Might want to check parity bits here but the checksum in
     * the end should really be enough */

    /* 4 words: DID, SDID, DC, [DATA], checksum */
    if (parser->offset + i + 4 >= parser->work_data_size)
      goto not_enough_data;

    /* We have a valid ADF */
    DID = data[parser->offset + i] & 0xff;
    SDID = data[parser->offset + i + 1] & 0xff;
    DC = data[parser->offset + i + 2] & 0xff;
    i += 3;

    /* Check if we have enough room to get the User Data and checksum */
    if (parser->offset + i + DC + 1 >= parser->work_data_size)
      goto not_enough_data;

    /* We found a valid ANC \o/ */
    anc->DID = DID;
    anc->SDID_block_number = SDID;
    anc->data_count = DC;
    memset (anc->data, 0, 256);

    /* FIXME: We assume here the same data format for the user data as for the
     * DID/SDID: 10 bits with parity in the upper 2 bits. In theory some
     * standards could define this differently and even have full 10 bits of
     * user data but there does not seem to be a single such standard after
     * all these years.
     */

    /* i is at the beginning of the user data now */
    for (j = 0; j < anc->data_count; j++)
      anc->data[j] = data[parser->offset + i + j] & 0xff;
    i += DC;

    /* Checksum calculation SMPTE S291 3.2.1 */
    for (j = (composite ? 1 : 3); j < i; j++)
      checksum += data[parser->offset + j] & 0x1ff;
    checksum &= 0x1ff;
    checksum |= (!(checksum >> 8)) << 9;

    if (checksum != (data[parser->offset + i] & 0x3ff)) {
      GST_WARNING ("ADF checksum mismatch: expected 0x%03x, got 0x%03x",
          checksum, (data[parser->offset + i] & 0x3ff));
      parser->offset += 1;
      continue;
    }

    i += 1;

    found = TRUE;
    parser->offset += i;
    break;
  }

  if (found)
    return GST_VIDEO_VBI_PARSER_RESULT_OK;

  return GST_VIDEO_VBI_PARSER_RESULT_DONE;

  /* ERRORS */
not_enough_data:
  {
    GST_WARNING ("ANC requires more User Data than available line size");
    /* Avoid further calls to go in the same error */
    parser->offset = parser->work_data_size;
    return GST_VIDEO_VBI_PARSER_RESULT_ERROR;
  }
}
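
/* Note on the checksum above: the 10-bit checksum word carries the sum of the
 * 9 LSBs of DID through the last user data word in bits b0-b8, with b9 being
 * the inverse of b8 (SMPTE S291). For example, a 9-bit sum of 0x0ab (b8 = 0)
 * yields the checksum word 0x2ab, while a sum of 0x1ab stays 0x1ab.
 * get_ancillary_8() below applies the same structure to 8-bit words, where
 * the checksum is simply the low 8 bits of the sum. */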

/* See SMPTE S291 */
static GstVideoVBIParserResult
get_ancillary_8 (GstVideoVBIParser * parser, GstVideoAncillary * anc)
{
  gboolean found = FALSE;
  const guint8 *data = parser->work_data;

  g_return_val_if_fail (parser != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);
  g_return_val_if_fail (anc != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);

  /* 3 words are needed at least to detect what kind of packet we look at
   *
   * - ADF (SMPTE S291 3.2.1) in case of component ancillary format:
   *       0x000 0x3ff 0x3ff (followed by DID, SDID)
   * - ADF (SMPTE S291 3.2.2) in case of composite ancillary format:
   *       0x3fc DID   SDID
   */
  while (parser->offset + 3 < parser->work_data_size) {
    guint8 DID, SDID, DC;
    guint i = 0, j;
    gboolean composite;
    guint checksum = 0;

    /* Look for ADF */
    if (data[parser->offset] == 0xfc) {
      /* composite */
      composite = TRUE;
      i += 1;
    } else if (data[parser->offset] == 0x00 &&
        data[parser->offset + 1] == 0xff && data[parser->offset + 2] == 0xff) {
      /* component */
      composite = FALSE;
      i += 3;
    } else {
      parser->offset += 1;
      continue;
    }

    /* 4 words: DID, SDID, DC, [DATA], checksum */
    if (parser->offset + i + 4 >= parser->work_data_size)
      goto not_enough_data;

    /* We have a valid ADF */
    DID = data[parser->offset + i];
    SDID = data[parser->offset + i + 1];
    DC = data[parser->offset + i + 2];
    i += 3;

    /* Check if we have enough room to get the User Data and checksum */
    if (parser->offset + i + DC + 1 >= parser->work_data_size)
      goto not_enough_data;

    /* We found a valid ANC \o/ */
    anc->DID = DID;
    anc->SDID_block_number = SDID;
    anc->data_count = DC;
    memset (anc->data, 0, 256);

    /* i is at the beginning of the user data now */
    for (j = 0; j < anc->data_count; j++)
      anc->data[j] = data[parser->offset + i + j] & 0xff;
    i += DC;

    /* Checksum calculation SMPTE S291 3.2.1 */
    for (j = (composite ? 1 : 3); j < i; j++)
      checksum += data[parser->offset + j];
    checksum &= 0xff;

    if (checksum != data[parser->offset + i]) {
      GST_WARNING ("ADF checksum mismatch: expected 0x%02x, got 0x%02x",
          checksum, data[parser->offset + i]);
      parser->offset += 1;
      continue;
    }

    i += 1;

    found = TRUE;
    parser->offset += i;
    break;
  }

  if (found)
    return GST_VIDEO_VBI_PARSER_RESULT_OK;

  return GST_VIDEO_VBI_PARSER_RESULT_DONE;

  /* ERRORS */
not_enough_data:
  {
    GST_WARNING ("ANC requires more User Data than available line size");
    /* Avoid further calls to go in the same error */
    parser->offset = parser->work_data_size;
    return GST_VIDEO_VBI_PARSER_RESULT_ERROR;
  }
}

/**
 * gst_video_vbi_parser_get_ancillary:
 * @parser: a #GstVideoVBIParser
 * @anc: (out caller-allocates): a #GstVideoAncillary to store the parsed ancillary data in
 *
 * Parse the line provided previously by gst_video_vbi_parser_add_line().
 *
 * Since: 1.16
 *
 * Returns: %GST_VIDEO_VBI_PARSER_RESULT_OK if ancillary data was found and
 * @anc was filled. %GST_VIDEO_VBI_PARSER_RESULT_DONE if there wasn't any
 * data.
 */

GstVideoVBIParserResult
gst_video_vbi_parser_get_ancillary (GstVideoVBIParser * parser,
    GstVideoAncillary * anc)
{
  g_return_val_if_fail (parser != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);
  g_return_val_if_fail (anc != NULL, GST_VIDEO_VBI_PARSER_RESULT_ERROR);

  if (parser->bit16)
    return get_ancillary_16 (parser, anc);
  return get_ancillary_8 (parser, anc);
}

/**
 * gst_video_vbi_parser_new:
 * @format: a #GstVideoFormat
 * @pixel_width: The width in pixels to use
 *
 * Create a new #GstVideoVBIParser for the specified @format and @pixel_width.
 *
 * Since: 1.16
 *
 * Returns: The new #GstVideoVBIParser or %NULL if the @format and/or @pixel_width
 * is not supported.
 */
GstVideoVBIParser *
gst_video_vbi_parser_new (GstVideoFormat format, guint32 pixel_width)
{
  GstVideoVBIParser *parser;

  g_return_val_if_fail (pixel_width > 0, NULL);

  switch (format) {
    case GST_VIDEO_FORMAT_v210:
      parser = g_new0 (GstVideoVBIParser, 1);
      parser->bit16 = TRUE;
      break;
    case GST_VIDEO_FORMAT_UYVY:
      parser = g_new0 (GstVideoVBIParser, 1);
      parser->bit16 = FALSE;
      break;
    default:
      GST_WARNING ("Format not supported by GstVideoVBIParser");
      return NULL;
  }

  gst_video_info_init (&parser->info);
  if (!gst_video_info_set_format (&parser->info, format, pixel_width, 1)) {
    GST_ERROR ("Could not create GstVideoInfo");
    g_free (parser);
    return NULL;
  }

  /* Allocate the workspace which is going to be 2 * pixel_width big
   *  2 : number of pixels per "component" (we only deal with 4:2:2)
   * We use 1 or 2 bytes per pixel depending on whether we are internally
   * working in 8 or 16bit */
  parser->work_data_size = 2 * pixel_width;
  if (parser->bit16)
    parser->work_data = g_malloc0 (parser->work_data_size * 2);
  else
    parser->work_data = g_malloc0 (parser->work_data_size);
  parser->offset = 0;

  return parser;
}
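
/* For example, a 1920-pixel v210 line gives work_data_size = 3840 (one luma
 * plus one chroma sample per pixel in 4:2:2), stored as 16-bit words, i.e. a
 * 7680-byte work buffer; with UYVY the same 3840 samples take one byte each. */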

/**
 * gst_video_vbi_parser_free:
 * @parser: a #GstVideoVBIParser
 *
 * Frees the @parser.
 *
 * Since: 1.16
 */
void
gst_video_vbi_parser_free (GstVideoVBIParser * parser)
{
  g_return_if_fail (parser != NULL);

  g_free (parser->work_data);
  g_free (parser);
}

static void
convert_line_from_uyvy (GstVideoVBIParser * parser, const guint8 * data)
{
  guint i;
  guint8 *y = parser->work_data;

  /* Data is stored differently in SD, making no distinction between Y and UV */
  if (parser->info.width < 1280) {
    for (i = 0; i < parser->info.width - 3; i += 4) {
      *y++ = data[(i / 4) * 4 + 0];
      *y++ = data[(i / 4) * 4 + 1];
      *y++ = data[(i / 4) * 4 + 2];
      *y++ = data[(i / 4) * 4 + 3];
    }
  } else {
    guint8 *uv = y + parser->info.width;

    for (i = 0; i < parser->info.width - 3; i += 4) {
      *uv++ = data[(i / 4) * 4 + 0];
      *y++ = data[(i / 4) * 4 + 1];
      *uv++ = data[(i / 4) * 4 + 2];
      *y++ = data[(i / 4) * 4 + 3];
    }
  }
  GST_MEMDUMP ("Converted line", parser->work_data, 128);
}

static void
gst_info_dump_mem16_line (gchar * linebuf, gsize linebuf_size,
    const guint16 * mem, gsize mem_offset, gsize mem_size)
{
  gchar hexstr[50], digitstr[6];

  if (mem_size > 8)
    mem_size = 8;

  hexstr[0] = '\0';

  if (mem != NULL) {
    guint i = 0;

    mem += mem_offset;
    while (i < mem_size) {
      g_snprintf (digitstr, sizeof (digitstr), "%04x ", mem[i]);
      g_strlcat (hexstr, digitstr, sizeof (hexstr));
      ++i;
    }
  }

  g_snprintf (linebuf, linebuf_size, "%08x: %-48.48s",
      (guint) mem_offset, hexstr);
}

static void
convert_line_from_v210 (GstVideoVBIParser * parser, const guint8 * data)
{
  guint i;
  guint16 *y = (guint16 *) parser->work_data;
  guint32 a, b, c, d;

  /* Data is stored differently in SD, making no distinction between Y and UV */
  if (parser->info.width < 1280) {
    /* Convert the line */
    for (i = 0; i < parser->info.width - 5; i += 6) {
      a = GST_READ_UINT32_LE (data + (i / 6) * 16 + 0);
      b = GST_READ_UINT32_LE (data + (i / 6) * 16 + 4);
      c = GST_READ_UINT32_LE (data + (i / 6) * 16 + 8);
      d = GST_READ_UINT32_LE (data + (i / 6) * 16 + 12);

      *y++ = (a >> 0) & 0x3ff;
      *y++ = (a >> 10) & 0x3ff;
      *y++ = (a >> 20) & 0x3ff;
      *y++ = (b >> 0) & 0x3ff;

      *y++ = (b >> 10) & 0x3ff;
      *y++ = (b >> 20) & 0x3ff;
      *y++ = (c >> 0) & 0x3ff;
      *y++ = (c >> 10) & 0x3ff;

      *y++ = (c >> 20) & 0x3ff;
      *y++ = (d >> 0) & 0x3ff;
      *y++ = (d >> 10) & 0x3ff;
      *y++ = (d >> 20) & 0x3ff;
    }
  } else {
    guint16 *uv = y + parser->info.width;

    /* Convert the line */
    for (i = 0; i < parser->info.width - 5; i += 6) {
      a = GST_READ_UINT32_LE (data + (i / 6) * 16 + 0);
      b = GST_READ_UINT32_LE (data + (i / 6) * 16 + 4);
      c = GST_READ_UINT32_LE (data + (i / 6) * 16 + 8);
      d = GST_READ_UINT32_LE (data + (i / 6) * 16 + 12);

      *uv++ = (a >> 0) & 0x3ff;
      *y++ = (a >> 10) & 0x3ff;
      *uv++ = (a >> 20) & 0x3ff;
      *y++ = (b >> 0) & 0x3ff;

      *uv++ = (b >> 10) & 0x3ff;
      *y++ = (b >> 20) & 0x3ff;
      *uv++ = (c >> 0) & 0x3ff;
      *y++ = (c >> 10) & 0x3ff;

      *uv++ = (c >> 20) & 0x3ff;
      *y++ = (d >> 0) & 0x3ff;
      *uv++ = (d >> 10) & 0x3ff;
      *y++ = (d >> 20) & 0x3ff;
    }
  }

  if (0) {
    guint off = 0;
    gsize length = parser->info.width * 2;

    GST_TRACE ("--------"
        "-------------------------------------------------------------------");

    while (off < length) {
      gchar buf[128];

      /* gst_info_dump_mem_line will process 16 bytes (8 16bit chunks) at most */
      gst_info_dump_mem16_line (buf, sizeof (buf),
          (guint16 *) parser->work_data, off, length - off);
      GST_TRACE ("%s", buf);
      off += 8;
    }
    GST_TRACE ("--------"
        "-------------------------------------------------------------------");
  }
}

/**
 * gst_video_vbi_parser_add_line:
 * @parser: a #GstVideoVBIParser
 * @data: (array) (transfer none): The line of data to parse
 *
 * Provide a new line of data to the @parser. Call gst_video_vbi_parser_get_ancillary()
 * to get the Ancillary data that might be present on that line.
 *
 * Since: 1.16
 */
void
gst_video_vbi_parser_add_line (GstVideoVBIParser * parser, const guint8 * data)
{
  g_return_if_fail (parser != NULL);
  g_return_if_fail (data != NULL);

  /* Reset offset */
  parser->offset = 0;

  switch (GST_VIDEO_INFO_FORMAT (&parser->info)) {
    case GST_VIDEO_FORMAT_v210:
      convert_line_from_v210 (parser, data);
      break;
    case GST_VIDEO_FORMAT_UYVY:
      convert_line_from_uyvy (parser, data);
      break;
    default:
      GST_ERROR ("UNSUPPORTED FORMAT !");
      g_assert_not_reached ();
      break;
  }
}

struct _GstVideoVBIEncoder
{
  GstVideoInfo info;            /* format of the lines provided */
  guint8 *work_data;            /* Converted line in planar 16bit format */
  guint32 work_data_size;       /* Size in bytes of work_data */
  guint offset;                 /* Current offset (in bytes) in work_data */
  gboolean bit16;               /* Data is stored as 16bit if TRUE. Else 8bit(without parity) */
};

G_DEFINE_BOXED_TYPE (GstVideoVBIEncoder, gst_video_vbi_encoder,
    (GBoxedCopyFunc) gst_video_vbi_encoder_copy,
    (GBoxedFreeFunc) gst_video_vbi_encoder_free);

GstVideoVBIEncoder *
gst_video_vbi_encoder_copy (const GstVideoVBIEncoder * encoder)
{
  GstVideoVBIEncoder *res;

  g_return_val_if_fail (encoder != NULL, NULL);

  res = gst_video_vbi_encoder_new (GST_VIDEO_INFO_FORMAT (&encoder->info),
      encoder->info.width);
  if (res) {
    memcpy (res->work_data, encoder->work_data, encoder->work_data_size);
  }
  return res;
}

/**
 * gst_video_vbi_encoder_free:
 * @encoder: a #GstVideoVBIEncoder
 *
 * Frees the @encoder.
 *
 * Since: 1.16
 */
void
gst_video_vbi_encoder_free (GstVideoVBIEncoder * encoder)
{
  g_return_if_fail (encoder != NULL);

  g_free (encoder->work_data);
  g_free (encoder);
}

/**
 * gst_video_vbi_encoder_new:
 * @format: a #GstVideoFormat
 * @pixel_width: The width in pixels to use
 *
 * Create a new #GstVideoVBIEncoder for the specified @format and @pixel_width.
 *
 * Since: 1.16
 *
 * Returns: The new #GstVideoVBIEncoder or %NULL if the @format and/or @pixel_width
 * is not supported.
 */
GstVideoVBIEncoder *
gst_video_vbi_encoder_new (GstVideoFormat format, guint32 pixel_width)
{
  GstVideoVBIEncoder *encoder;

  g_return_val_if_fail (pixel_width > 0, NULL);

  switch (format) {
    case GST_VIDEO_FORMAT_v210:
      encoder = g_new0 (GstVideoVBIEncoder, 1);
      encoder->bit16 = TRUE;
      break;
    case GST_VIDEO_FORMAT_UYVY:
      encoder = g_new0 (GstVideoVBIEncoder, 1);
      encoder->bit16 = FALSE;
      break;
    default:
      GST_WARNING ("Format not supported by GstVideoVBIEncoder");
      return NULL;
  }

  gst_video_info_init (&encoder->info);
  if (!gst_video_info_set_format (&encoder->info, format, pixel_width, 1)) {
    GST_ERROR ("Could not create GstVideoInfo");
    g_free (encoder);
    return NULL;
  }

  /* Allocate the workspace which is going to be 2 * pixel_width big
   *  2 : number of pixels per "component" (we only deal with 4:2:2)
   * We use 1 or 2 bytes per pixel depending on whether we are internally
   * working in 8 or 16bit */
  encoder->work_data_size = 2 * pixel_width;
  if (encoder->bit16)
    encoder->work_data = g_malloc0 (encoder->work_data_size * 2);
  else
    encoder->work_data = g_malloc0 (encoder->work_data_size);
  encoder->offset = 0;

  return encoder;
}

#if G_GNUC_CHECK_VERSION(3,4)
static inline guint
parity (guint8 x)
{
  return __builtin_parity (x);
}
#else
static guint
parity (guint8 x)
{
  guint count = 0;

  while (x) {
    count += x & 1;
    x >>= 1;
  }

  return count & 1;
}
#endif

/* Odd/even parity in the upper two bits */
#define SET_WITH_PARITY(buf, val) G_STMT_START { \
  *(buf) = val; \
    if (parity (val)) \
      *(buf) |= 0x100; \
    else \
      *(buf) |= 0x200; \
} G_STMT_END;
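
/* For example, SET_WITH_PARITY() turns 0x61 (odd number of set bits) into the
 * 10-bit word 0x161, and 0x03 (even number of set bits) into 0x203: bit 8 is
 * set so that bits 0-8 have even parity, and bit 9 is always the inverse of
 * bit 8. */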

/**
 * gst_video_vbi_encoder_add_ancillary:
 * @encoder: a #GstVideoVBIEncoder
 * @composite: %TRUE if composite ADF should be created, component otherwise
 * @DID: The Data Identifier
 * @SDID_block_number: The Secondary Data Identifier (if type 2) or the Data
 *                     Block Number (if type 1)
 * @data_count: The amount of data (in bytes) in @data (max 255 bytes)
 * @data: (array length=data_count): The user data content of the Ancillary packet.
 *    Does not contain the ADF, DID, SDID nor CS.
 *
 * Stores Video Ancillary data, according to the SMPTE-291M specification.
 *
 * Note that the contents of the data are always read as 8bit data (i.e. do not contain
 * the parity check bits).
 *
 * Since: 1.16
 *
 * Returns: %TRUE if enough space was left in the current line, %FALSE
 *          otherwise.
 */
gboolean
gst_video_vbi_encoder_add_ancillary (GstVideoVBIEncoder * encoder,
    gboolean composite, guint8 DID, guint8 SDID_block_number,
    const guint8 * data, guint data_count)
{
  g_return_val_if_fail (encoder != NULL, FALSE);
  g_return_val_if_fail (data != NULL, FALSE);
  g_return_val_if_fail (data_count < 256, FALSE);

  /* Doesn't fit into this line anymore */
  if (encoder->offset + data_count + (composite ? 5 : 7) >
      encoder->work_data_size)
    return FALSE;

  if (encoder->bit16) {
    guint16 *work_data = ((guint16 *) encoder->work_data) + encoder->offset;
    guint i = 0, j;
    guint checksum = 0;

    /* Write ADF */
    if (composite) {
      work_data[i] = 0x3fc;
      i += 1;
    } else {
      work_data[i] = 0x000;
      work_data[i + 1] = 0x3ff;
      work_data[i + 2] = 0x3ff;
      i += 3;
    }

    SET_WITH_PARITY (&work_data[i], DID);
    SET_WITH_PARITY (&work_data[i + 1], SDID_block_number);
    SET_WITH_PARITY (&work_data[i + 2], data_count);
    i += 3;

    for (j = 0; j < data_count; j++)
      SET_WITH_PARITY (&work_data[i + j], data[j]);
    i += data_count;

    for (j = (composite ? 1 : 3); j < i; j++)
      checksum += work_data[j];
    checksum &= 0x1ff;
    checksum |= (!(checksum >> 8)) << 9;

    work_data[i] = checksum;
    i += 1;

    encoder->offset += i;
  } else {
    guint8 *work_data = ((guint8 *) encoder->work_data) + encoder->offset;
    guint i = 0, j;
    guint checksum = 0;

    /* Write ADF */
    if (composite) {
      work_data[i] = 0xfc;
      i += 1;
    } else {
      work_data[i] = 0x00;
      work_data[i + 1] = 0xff;
      work_data[i + 2] = 0xff;
      i += 3;
    }

    work_data[i] = DID;
    work_data[i + 1] = SDID_block_number;
    work_data[i + 2] = data_count;
    i += 3;

    for (j = 0; j < data_count; j++)
      work_data[i + j] = data[j];
    i += data_count;

    for (j = (composite ? 1 : 3); j < i; j++)
      checksum += work_data[j];
    checksum &= 0xff;

    work_data[i] = checksum;
    i += 1;

    encoder->offset += i;
  }

  return TRUE;
}
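
/* Example (illustrative only): building one line worth of ANC for v210 video.
 * The DID/SDID pair 0x61/0x01 is the one defined by SMPTE 334-1 for CEA-708
 * captions; `cdp`, `cdp_len`, `line_data` and `width` stand in for the
 * caller's own data.
 *
 *   GstVideoVBIEncoder *encoder =
 *       gst_video_vbi_encoder_new (GST_VIDEO_FORMAT_v210, width);
 *
 *   if (gst_video_vbi_encoder_add_ancillary (encoder, FALSE, 0x61, 0x01,
 *           cdp, cdp_len))
 *     gst_video_vbi_encoder_write_line (encoder, line_data);
 *   gst_video_vbi_encoder_free (encoder);
 */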

static void
convert_line_to_v210 (GstVideoVBIEncoder * encoder, guint8 * data)
{
  guint i;
  const guint16 *y = (const guint16 *) encoder->work_data;
  guint32 a, b, c, d;

  /* Data is stored differently in SD, making no distinction between Y and UV */
  if (encoder->info.width < 1280) {
    /* Convert the line */
    for (i = 0; i < encoder->info.width - 5; i += 6) {
      a = ((y[0] & 0x3ff) << 0)
          | ((y[1] & 0x3ff) << 10)
          | ((y[2] & 0x3ff) << 20);
      y += 3;

      b = ((y[0] & 0x3ff) << 0)
          | ((y[1] & 0x3ff) << 10)
          | ((y[2] & 0x3ff) << 20);
      y += 3;

      c = ((y[0] & 0x3ff) << 0)
          | ((y[1] & 0x3ff) << 10)
          | ((y[2] & 0x3ff) << 20);
      y += 3;

      d = ((y[0] & 0x3ff) << 0)
          | ((y[1] & 0x3ff) << 10)
          | ((y[2] & 0x3ff) << 20);
      y += 3;

      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 0, a);
      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 4, b);
      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 8, c);
      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 12, d);
    }
  } else {
    const guint16 *uv = y + encoder->info.width;

    /* Convert the line */
    for (i = 0; i < encoder->info.width - 5; i += 6) {
      a = ((uv[0] & 0x3ff) << 0)
          | ((y[0] & 0x3ff) << 10)
          | ((uv[1] & 0x3ff) << 20);
      uv += 2;
      y++;

      b = ((y[0] & 0x3ff) << 0)
          | ((uv[0] & 0x3ff) << 10)
          | ((y[1] & 0x3ff) << 20);
      y += 2;
      uv++;

      c = ((uv[0] & 0x3ff) << 0)
          | ((y[0] & 0x3ff) << 10)
          | ((uv[1] & 0x3ff) << 20);
      uv += 2;
      y++;

      d = ((y[0] & 0x3ff) << 0)
          | ((uv[0] & 0x3ff) << 10)
          | ((y[1] & 0x3ff) << 20);
      y += 2;
      uv++;

      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 0, a);
      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 4, b);
      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 8, c);
      GST_WRITE_UINT32_LE (data + (i / 6) * 16 + 12, d);
    }
  }
}

static void
convert_line_to_uyvy (GstVideoVBIEncoder * encoder, guint8 * data)
{
  guint i;
  const guint8 *y = encoder->work_data;

  /* Data is stored differently in SD, making no distinction between Y and UV */
  if (encoder->info.width < 1280) {
    for (i = 0; i < encoder->info.width - 3; i += 4) {
      data[(i / 4) * 4 + 0] = *y++;
      data[(i / 4) * 4 + 1] = *y++;
      data[(i / 4) * 4 + 2] = *y++;
      data[(i / 4) * 4 + 3] = *y++;
    }
  } else {
    const guint8 *uv = y + encoder->info.width;

    for (i = 0; i < encoder->info.width - 3; i += 4) {
      data[(i / 4) * 4 + 0] = *uv++;
      data[(i / 4) * 4 + 1] = *y++;
      data[(i / 4) * 4 + 2] = *uv++;
      data[(i / 4) * 4 + 3] = *y++;
    }
  }
}

void
gst_video_vbi_encoder_write_line (GstVideoVBIEncoder * encoder, guint8 * data)
{
  g_return_if_fail (encoder != NULL);
  g_return_if_fail (data != NULL);

  /* nothing to write? just exit early */
  if (!encoder->offset)
    return;

  switch (GST_VIDEO_INFO_FORMAT (&encoder->info)) {
    case GST_VIDEO_FORMAT_v210:
      convert_line_to_v210 (encoder, data);
      break;
    case GST_VIDEO_FORMAT_UYVY:
      convert_line_to_uyvy (encoder, data);
      break;
    default:
      GST_ERROR ("UNSUPPORTED FORMAT !");
      g_assert_not_reached ();
      break;
  }

  encoder->offset = 0;
  memset (encoder->work_data, 0,
      encoder->work_data_size * (encoder->bit16 ? 2 : 1));
}

/* Closed Caption Meta implementation *******************************************/

GType
gst_video_caption_meta_api_get_type (void)
{
  static volatile GType type;

  if (g_once_init_enter (&type)) {
    static const gchar *tags[] = { NULL };
    GType _type = gst_meta_api_type_register ("GstVideoCaptionMetaAPI", tags);
    GST_INFO ("registering");
    g_once_init_leave (&type, _type);
  }
  return type;
}


static gboolean
gst_video_caption_meta_transform (GstBuffer * dest, GstMeta * meta,
    GstBuffer * buffer, GQuark type, gpointer data)
{
  GstVideoCaptionMeta *dmeta, *smeta;

  /* We always copy over the caption meta */
  smeta = (GstVideoCaptionMeta *) meta;

  GST_DEBUG ("copy caption metadata");
  dmeta =
      gst_buffer_add_video_caption_meta (dest, smeta->caption_type,
      smeta->data, smeta->size);
  if (!dmeta)
    return FALSE;

  return TRUE;
}

static gboolean
gst_video_caption_meta_init (GstMeta * meta, gpointer params,
    GstBuffer * buffer)
{
  GstVideoCaptionMeta *emeta = (GstVideoCaptionMeta *) meta;

  emeta->caption_type = GST_VIDEO_CAPTION_TYPE_UNKNOWN;
  emeta->data = NULL;
  emeta->size = 0;

  return TRUE;
}

static void
gst_video_caption_meta_free (GstMeta * meta, GstBuffer * buffer)
{
  GstVideoCaptionMeta *emeta = (GstVideoCaptionMeta *) meta;

  g_free (emeta->data);
}

const GstMetaInfo *
gst_video_caption_meta_get_info (void)
{
  static const GstMetaInfo *meta_info = NULL;

  if (g_once_init_enter ((GstMetaInfo **) & meta_info)) {
    const GstMetaInfo *mi = gst_meta_register (GST_VIDEO_CAPTION_META_API_TYPE,
        "GstVideoCaptionMeta",
        sizeof (GstVideoCaptionMeta),
        gst_video_caption_meta_init,
        gst_video_caption_meta_free,
        gst_video_caption_meta_transform);
    g_once_init_leave ((GstMetaInfo **) & meta_info, (GstMetaInfo *) mi);
  }
  return meta_info;
}

/**
 * gst_buffer_add_video_caption_meta:
 * @buffer: a #GstBuffer
 * @caption_type: The type of Closed Caption to add
 * @data: (array length=size) (transfer none): The Closed Caption data
 * @size: The size of @data in bytes
 *
 * Attaches #GstVideoCaptionMeta metadata to @buffer with the given
 * parameters.
 *
 * Returns: (transfer none): the #GstVideoCaptionMeta on @buffer.
 *
 * Since: 1.16
 */
GstVideoCaptionMeta *
gst_buffer_add_video_caption_meta (GstBuffer * buffer,
    GstVideoCaptionType caption_type, const guint8 * data, gsize size)
{
  GstVideoCaptionMeta *meta;

  g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL);
  g_return_val_if_fail (data != NULL, NULL);
  g_return_val_if_fail (size > 0, NULL);

  switch (caption_type) {
    case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
    case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:
    case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
    case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
      break;
    default:
      GST_ERROR ("Unknown caption type !");
      return NULL;
  }
  /* FIXME : Add checks for content ? */

  meta = (GstVideoCaptionMeta *) gst_buffer_add_meta (buffer,
      GST_VIDEO_CAPTION_META_INFO, NULL);
  g_return_val_if_fail (meta != NULL, NULL);

  meta->caption_type = caption_type;
  meta->data = g_memdup (data, size);
  meta->size = size;

  return meta;
}
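
/* Example (illustrative only): attaching CEA-708 CDP data to a video buffer.
 * `cdp` and `cdp_len` stand in for caption data obtained elsewhere.
 *
 *   GstVideoCaptionMeta *meta =
 *       gst_buffer_add_video_caption_meta (buffer,
 *       GST_VIDEO_CAPTION_TYPE_CEA708_CDP, cdp, cdp_len);
 *
 * The data is copied with g_memdup(), so the caller keeps ownership of `cdp`.
 */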

/**
 * gst_video_caption_type_from_caps:
 * @caps: Fixed #GstCaps to parse
 *
 * Parses fixed Closed Caption #GstCaps and returns the corresponding caption
 * type, or %GST_VIDEO_CAPTION_TYPE_UNKNOWN.
 *
 * Returns: #GstVideoCaptionType.
 *
 * Since: 1.16
 */
GstVideoCaptionType
gst_video_caption_type_from_caps (const GstCaps * caps)
{
  const GstStructure *s;
  const gchar *format;

  g_return_val_if_fail (gst_caps_is_fixed (caps),
      GST_VIDEO_CAPTION_TYPE_UNKNOWN);

  s = gst_caps_get_structure (caps, 0);
  g_return_val_if_fail (s != NULL, GST_VIDEO_CAPTION_TYPE_UNKNOWN);

  format = gst_structure_get_string (s, "format");
  if (gst_structure_has_name (s, "closedcaption/x-cea-608")) {
    if (g_strcmp0 (format, "raw") == 0) {
      return GST_VIDEO_CAPTION_TYPE_CEA608_RAW;
    } else if (g_strcmp0 (format, "s334-1a") == 0) {
      return GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A;
    }
  } else if (gst_structure_has_name (s, "closedcaption/x-cea-708")) {
    if (g_strcmp0 (format, "cc_data") == 0) {
      return GST_VIDEO_CAPTION_TYPE_CEA708_RAW;
    } else if (g_strcmp0 (format, "cdp") == 0) {
      return GST_VIDEO_CAPTION_TYPE_CEA708_CDP;
    }
  }
  return GST_VIDEO_CAPTION_TYPE_UNKNOWN;
}

/**
 * gst_video_caption_type_to_caps:
 * @type: #GstVideoCaptionType
 *
 * Creates new caps corresponding to @type.
 *
 * Returns: (transfer full): new #GstCaps
 *
 * Since: 1.16
 */
GstCaps *
gst_video_caption_type_to_caps (GstVideoCaptionType type)
{
  GstCaps *caption_caps;

  g_return_val_if_fail (type != GST_VIDEO_CAPTION_TYPE_UNKNOWN, NULL);

  switch (type) {
    case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
      caption_caps = gst_caps_new_simple ("closedcaption/x-cea-608",
          "format", G_TYPE_STRING, "raw", NULL);
      break;
    case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:
      caption_caps = gst_caps_new_simple ("closedcaption/x-cea-608",
          "format", G_TYPE_STRING, "s334-1a", NULL);
      break;
    case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
      caption_caps = gst_caps_new_simple ("closedcaption/x-cea-708",
          "format", G_TYPE_STRING, "cc_data", NULL);
      break;
    case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
      caption_caps = gst_caps_new_simple ("closedcaption/x-cea-708",
          "format", G_TYPE_STRING, "cdp", NULL);
      break;
    default:
      g_return_val_if_reached (NULL);
      break;
  }

  return caption_caps;
}
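
/* The two helpers above are inverses of each other for the known caption
 * types; GST_VIDEO_CAPTION_TYPE_CEA708_CDP, for instance, maps to the caps
 * "closedcaption/x-cea-708, format=(string)cdp" and back:
 *
 *   GstCaps *caps =
 *       gst_video_caption_type_to_caps (GST_VIDEO_CAPTION_TYPE_CEA708_CDP);
 *   g_assert (gst_video_caption_type_from_caps (caps) ==
 *       GST_VIDEO_CAPTION_TYPE_CEA708_CDP);
 *   gst_caps_unref (caps);
 */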
1132