1 /*
2 * GStreamer
3 * Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21
22 #ifdef HAVE_CONFIG_H
23 # include <config.h>
24 #endif
25
26 #include <gst/gst.h>
27 #include <gst/base/base.h>
28 #include <gst/video/video.h>
29 #include <string.h>
30
31 #include "gstcccombiner.h"
32
33 GST_DEBUG_CATEGORY_STATIC (gst_cc_combiner_debug);
34 #define GST_CAT_DEFAULT gst_cc_combiner_debug
35
/* Always-present video sink pad: any caps, video passes through with
 * caption metas attached */
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS_ANY);

/* Source pad mirroring the video sink pad */
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS_ANY);

/* Optional request pad for the closed-caption stream; accepts raw or
 * S334-1A CEA-608 and raw cc_data or CDP CEA-708 */
static GstStaticPadTemplate captiontemplate =
    GST_STATIC_PAD_TEMPLATE ("caption",
    GST_PAD_SINK,
    GST_PAD_REQUEST,
    GST_STATIC_CAPS
    ("closedcaption/x-cea-608,format={ (string) raw, (string) s334-1a}; "
        "closedcaption/x-cea-708,format={ (string) cc_data, (string) cdp }"));
53
/* GstCCCombiner derives from GstAggregator; register the element under the
 * name "cccombiner" */
#define parent_class gst_cc_combiner_parent_class
G_DEFINE_TYPE (GstCCCombiner, gst_cc_combiner, GST_TYPE_AGGREGATOR);
GST_ELEMENT_REGISTER_DEFINE (cccombiner, "cccombiner",
    GST_RANK_NONE, GST_TYPE_CCCOMBINER);
58
/* Element property IDs */
enum
{
  PROP_0,
  PROP_SCHEDULE,
  PROP_MAX_SCHEDULED,
};

/* Default cap on entries per field in the scheduling queues (see
 * queue_caption) and default for the schedule property */
#define DEFAULT_MAX_SCHEDULED 30
#define DEFAULT_SCHEDULE TRUE
68
/* A caption buffer collected for the current video frame, together with
 * the caption type it is attached as */
typedef struct
{
  GstVideoCaptionType caption_type;
  GstBuffer *buffer;
} CaptionData;

/* Entry in a per-field scheduling queue: the caption buffer plus the
 * running/stream times reported in QoS messages when entries are dropped */
typedef struct
{
  GstBuffer *buffer;
  GstClockTime running_time;
  GstClockTime stream_time;
} CaptionQueueItem;
81
/* GArray clear function for current_frame_captions: drops the buffer ref */
static void
caption_data_clear (CaptionData * data)
{
  gst_buffer_unref (data->buffer);
}
87
/* Clear function for scheduled-queue items: drops the buffer ref */
static void
clear_scheduled (CaptionQueueItem * item)
{
  gst_buffer_unref (item->buffer);
}
93
/* GObject finalize vfunc: free the two per-field scheduling queues and the
 * array of captions collected for the current frame. Unreffing queued
 * buffers presumably relies on the clear functions installed where these
 * containers are created (not in view) — confirm against _init. */
static void
gst_cc_combiner_finalize (GObject * object)
{
  GstCCCombiner *self = GST_CCCOMBINER (object);

  gst_queue_array_free (self->scheduled[0]);
  gst_queue_array_free (self->scheduled[1]);
  g_array_unref (self->current_frame_captions);
  self->current_frame_captions = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
106
/* Internal flow return used by collect_captions to signal that more caption
 * data must arrive before the queued video buffer can be finished */
#define GST_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
108
109 static const guint8 *
extract_cdp(const guint8 * cdp,guint cdp_len,guint * cc_data_len)110 extract_cdp (const guint8 * cdp, guint cdp_len, guint * cc_data_len)
111 {
112 GstByteReader br;
113 guint16 u16;
114 guint8 u8;
115 guint8 flags;
116 guint len = 0;
117 const guint8 *cc_data = NULL;
118
119 *cc_data_len = 0;
120
121 /* Header + footer length */
122 if (cdp_len < 11) {
123 goto done;
124 }
125
126 gst_byte_reader_init (&br, cdp, cdp_len);
127 u16 = gst_byte_reader_get_uint16_be_unchecked (&br);
128 if (u16 != 0x9669) {
129 goto done;
130 }
131
132 u8 = gst_byte_reader_get_uint8_unchecked (&br);
133 if (u8 != cdp_len) {
134 goto done;
135 }
136
137 gst_byte_reader_skip_unchecked (&br, 1);
138
139 flags = gst_byte_reader_get_uint8_unchecked (&br);
140
141 /* No cc_data? */
142 if ((flags & 0x40) == 0) {
143 goto done;
144 }
145
146 /* cdp_hdr_sequence_cntr */
147 gst_byte_reader_skip_unchecked (&br, 2);
148
149 /* time_code_present */
150 if (flags & 0x80) {
151 if (gst_byte_reader_get_remaining (&br) < 5) {
152 goto done;
153 }
154 gst_byte_reader_skip_unchecked (&br, 5);
155 }
156
157 /* ccdata_present */
158 if (flags & 0x40) {
159 guint8 cc_count;
160
161 if (gst_byte_reader_get_remaining (&br) < 2) {
162 goto done;
163 }
164 u8 = gst_byte_reader_get_uint8_unchecked (&br);
165 if (u8 != 0x72) {
166 goto done;
167 }
168
169 cc_count = gst_byte_reader_get_uint8_unchecked (&br);
170 if ((cc_count & 0xe0) != 0xe0) {
171 goto done;
172 }
173 cc_count &= 0x1f;
174
175 if (cc_count == 0)
176 return 0;
177
178 len = 3 * cc_count;
179 if (gst_byte_reader_get_remaining (&br) < len)
180 goto done;
181
182 cc_data = gst_byte_reader_get_data_unchecked (&br, len);
183 *cc_data_len = len;
184 }
185
186 done:
187 return cc_data;
188 }
189
/* Upper bound on the size of a generated CDP packet / raw CEA-608 data */
#define MAX_CDP_PACKET_LEN 256
#define MAX_CEA608_LEN 32

/* CDP framerate table: framerate code, fps_n, fps_d, then per-packet
 * cc/cea608 count limits (exact column meaning declared with
 * struct cdp_fps_entry in the header — not in view) */
static const struct cdp_fps_entry cdp_fps_table[] = {
  {0x1f, 24000, 1001, 25, 22, 3 /* FIXME: alternating max cea608 count! */ },
  {0x2f, 24, 1, 25, 22, 2},
  {0x3f, 25, 1, 24, 22, 2},
  {0x4f, 30000, 1001, 20, 18, 2},
  {0x5f, 30, 1, 20, 18, 2},
  {0x6f, 50, 1, 12, 11, 1},
  {0x7f, 60000, 1001, 10, 9, 1},
  {0x8f, 60, 1, 10, 9, 1},
};

/* All-zero sentinel returned when a framerate has no CDP mapping */
static const struct cdp_fps_entry null_fps_entry = { 0, 0, 0, 0 };
204
205 static const struct cdp_fps_entry *
cdp_fps_entry_from_fps(guint fps_n,guint fps_d)206 cdp_fps_entry_from_fps (guint fps_n, guint fps_d)
207 {
208 int i;
209 for (i = 0; i < G_N_ELEMENTS (cdp_fps_table); i++) {
210 if (cdp_fps_table[i].fps_n == fps_n && cdp_fps_table[i].fps_d == fps_d)
211 return &cdp_fps_table[i];
212 }
213 return &null_fps_entry;
214 }
215
216
/* Serialize cc_data (and an optional timecode) into a freshly allocated
 * CDP packet, padding the cc_data section up to fps_entry->max_cc_count
 * triplets. Increments self->cdp_hdr_sequence_cntr. Caller owns the
 * returned buffer.
 *
 * The byte-writer calls below follow the CDP wire layout in order; keep
 * the sequence intact when editing. */
static GstBuffer *
make_cdp (GstCCCombiner * self, const guint8 * cc_data, guint cc_data_len,
    const struct cdp_fps_entry *fps_entry, const GstVideoTimeCode * tc)
{
  GstByteWriter bw;
  guint8 flags, checksum;
  guint i, len;
  GstBuffer *ret = gst_buffer_new_allocate (NULL, MAX_CDP_PACKET_LEN, NULL);
  GstMapInfo map;

  gst_buffer_map (ret, &map, GST_MAP_WRITE);

  /* cdp_identifier */
  gst_byte_writer_init_with_data (&bw, map.data, MAX_CDP_PACKET_LEN, FALSE);
  gst_byte_writer_put_uint16_be_unchecked (&bw, 0x9669);
  /* Write a length of 0 for now */
  gst_byte_writer_put_uint8_unchecked (&bw, 0);

  /* framerate code from the fps table */
  gst_byte_writer_put_uint8_unchecked (&bw, fps_entry->fps_idx);

  /* caption_service_active */
  flags = 0x02;

  /* ccdata_present */
  flags |= 0x40;

  /* time_code_present, only when a usable timecode was supplied */
  if (tc && tc->config.fps_n > 0)
    flags |= 0x80;

  /* reserved */
  flags |= 0x01;

  gst_byte_writer_put_uint8_unchecked (&bw, flags);

  gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);

  /* time_code_section: BCD-encoded hh:mm:ss:ff with marker bits */
  if (tc && tc->config.fps_n > 0) {
    guint8 u8;

    gst_byte_writer_put_uint8_unchecked (&bw, 0x71);
    /* reserved 11 - 2 bits */
    u8 = 0xc0;
    /* tens of hours - 2 bits */
    u8 |= ((tc->hours / 10) & 0x3) << 4;
    /* units of hours - 4 bits */
    u8 |= (tc->hours % 10) & 0xf;
    gst_byte_writer_put_uint8_unchecked (&bw, u8);

    /* reserved 1 - 1 bit */
    u8 = 0x80;
    /* tens of minutes - 3 bits */
    u8 |= ((tc->minutes / 10) & 0x7) << 4;
    /* units of minutes - 4 bits */
    u8 |= (tc->minutes % 10) & 0xf;
    gst_byte_writer_put_uint8_unchecked (&bw, u8);

    /* field flag - 1 bit */
    u8 = tc->field_count < 2 ? 0x00 : 0x80;
    /* tens of seconds - 3 bits */
    u8 |= ((tc->seconds / 10) & 0x7) << 4;
    /* units of seconds - 4 bits */
    u8 |= (tc->seconds % 10) & 0xf;
    gst_byte_writer_put_uint8_unchecked (&bw, u8);

    /* drop frame flag - 1 bit */
    u8 = (tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) ? 0x80 :
        0x00;
    /* reserved0 - 1 bit */
    /* tens of frames - 2 bits */
    u8 |= ((tc->frames / 10) & 0x3) << 4;
    /* units of frames 4 bits */
    u8 |= (tc->frames % 10) & 0xf;
    gst_byte_writer_put_uint8_unchecked (&bw, u8);
  }

  /* ccdata_section: marker, cc_count with marker bits, then the triplets,
   * padded with invalid (0xfa) triplets up to max_cc_count */
  gst_byte_writer_put_uint8_unchecked (&bw, 0x72);
  gst_byte_writer_put_uint8_unchecked (&bw, 0xe0 | fps_entry->max_cc_count);
  gst_byte_writer_put_data_unchecked (&bw, cc_data, cc_data_len);
  while (fps_entry->max_cc_count > cc_data_len / 3) {
    gst_byte_writer_put_uint8_unchecked (&bw, 0xfa);
    gst_byte_writer_put_uint8_unchecked (&bw, 0x00);
    gst_byte_writer_put_uint8_unchecked (&bw, 0x00);
    cc_data_len += 3;
  }

  /* footer: marker + cdp_ftr_sequence_cntr (mirrors the header counter) */
  gst_byte_writer_put_uint8_unchecked (&bw, 0x74);
  gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);
  self->cdp_hdr_sequence_cntr++;
  /* We calculate the checksum afterwards */
  gst_byte_writer_put_uint8_unchecked (&bw, 0);

  /* patch the real packet length into byte 2 */
  len = gst_byte_writer_get_pos (&bw);
  gst_byte_writer_set_pos (&bw, 2);
  gst_byte_writer_put_uint8_unchecked (&bw, len);

  /* checksum chosen so all packet bytes sum to 0 mod 256 */
  checksum = 0;
  for (i = 0; i < len; i++) {
    checksum += map.data[i];
  }
  checksum &= 0xff;
  checksum = 256 - checksum;
  map.data[len - 1] = checksum;

  gst_buffer_unmap (ret, &map);

  gst_buffer_set_size (ret, len);

  return ret;
}
325
326 static GstBuffer *
make_padding(GstCCCombiner * self,const GstVideoTimeCode * tc,guint field)327 make_padding (GstCCCombiner * self, const GstVideoTimeCode * tc, guint field)
328 {
329 GstBuffer *ret = NULL;
330
331 switch (self->caption_type) {
332 case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
333 {
334 const guint8 cc_data[6] = { 0xfc, 0x80, 0x80, 0xf9, 0x80, 0x80 };
335
336 ret = make_cdp (self, cc_data, 6, self->cdp_fps_entry, tc);
337 break;
338 }
339 case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
340 {
341 GstMapInfo map;
342
343 ret = gst_buffer_new_allocate (NULL, 3, NULL);
344
345 gst_buffer_map (ret, &map, GST_MAP_WRITE);
346
347 map.data[0] = 0xfc | (field & 0x01);
348 map.data[1] = 0x80;
349 map.data[2] = 0x80;
350
351 gst_buffer_unmap (ret, &map);
352 break;
353 }
354 case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:
355 {
356 GstMapInfo map;
357
358 ret = gst_buffer_new_allocate (NULL, 3, NULL);
359
360 gst_buffer_map (ret, &map, GST_MAP_WRITE);
361
362 map.data[0] = field == 0 ? 0x80 : 0x00;
363 map.data[1] = 0x80;
364 map.data[2] = 0x80;
365
366 gst_buffer_unmap (ret, &map);
367 break;
368 }
369 case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
370 {
371 GstMapInfo map;
372
373 ret = gst_buffer_new_allocate (NULL, 2, NULL);
374
375 gst_buffer_map (ret, &map, GST_MAP_WRITE);
376
377 map.data[0] = 0x80;
378 map.data[1] = 0x80;
379
380 gst_buffer_unmap (ret, &map);
381 break;
382 }
383 default:
384 break;
385 }
386
387 return ret;
388 }
389
390 static void
queue_caption(GstCCCombiner * self,GstBuffer * scheduled,guint field)391 queue_caption (GstCCCombiner * self, GstBuffer * scheduled, guint field)
392 {
393 GstAggregatorPad *caption_pad;
394 CaptionQueueItem item;
395
396 if (self->progressive && field == 1) {
397 gst_buffer_unref (scheduled);
398 return;
399 }
400
401 caption_pad =
402 GST_AGGREGATOR_PAD_CAST (gst_element_get_static_pad (GST_ELEMENT_CAST
403 (self), "caption"));
404
405 g_assert (gst_queue_array_get_length (self->scheduled[field]) <=
406 self->max_scheduled);
407
408 if (gst_queue_array_get_length (self->scheduled[field]) ==
409 self->max_scheduled) {
410 CaptionQueueItem *dropped =
411 gst_queue_array_pop_tail_struct (self->scheduled[field]);
412
413 GST_WARNING_OBJECT (self,
414 "scheduled queue runs too long, dropping %" GST_PTR_FORMAT, dropped);
415
416 gst_element_post_message (GST_ELEMENT_CAST (self),
417 gst_message_new_qos (GST_OBJECT_CAST (self), FALSE,
418 dropped->running_time, dropped->stream_time,
419 GST_BUFFER_PTS (dropped->buffer), GST_BUFFER_DURATION (dropped)));
420
421 gst_buffer_unref (dropped->buffer);
422 }
423
424 gst_object_unref (caption_pad);
425
426 item.buffer = scheduled;
427 item.running_time =
428 gst_segment_to_running_time (&caption_pad->segment, GST_FORMAT_TIME,
429 GST_BUFFER_PTS (scheduled));
430 item.stream_time =
431 gst_segment_to_stream_time (&caption_pad->segment, GST_FORMAT_TIME,
432 GST_BUFFER_PTS (scheduled));
433
434 gst_queue_array_push_tail_struct (self->scheduled[field], &item);
435 }
436
437 static void
schedule_cdp(GstCCCombiner * self,const GstVideoTimeCode * tc,const guint8 * data,guint len,GstClockTime pts,GstClockTime duration)438 schedule_cdp (GstCCCombiner * self, const GstVideoTimeCode * tc,
439 const guint8 * data, guint len, GstClockTime pts, GstClockTime duration)
440 {
441 const guint8 *cc_data;
442 guint cc_data_len;
443 gboolean inject = FALSE;
444
445 if ((cc_data = extract_cdp (data, len, &cc_data_len))) {
446 guint8 i;
447
448 for (i = 0; i < cc_data_len / 3; i++) {
449 gboolean cc_valid = (cc_data[i * 3] & 0x04) == 0x04;
450 guint8 cc_type = cc_data[i * 3] & 0x03;
451
452 if (!cc_valid)
453 continue;
454
455 if (cc_type == 0x00 || cc_type == 0x01) {
456 if (cc_data[i * 3 + 1] != 0x80 || cc_data[i * 3 + 2] != 0x80) {
457 inject = TRUE;
458 break;
459 }
460 continue;
461 } else {
462 inject = TRUE;
463 break;
464 }
465 }
466 }
467
468 if (inject) {
469 GstBuffer *buf =
470 make_cdp (self, cc_data, cc_data_len, self->cdp_fps_entry, tc);
471
472 /* We only set those for QoS reporting purposes */
473 GST_BUFFER_PTS (buf) = pts;
474 GST_BUFFER_DURATION (buf) = duration;
475
476 queue_caption (self, buf, 0);
477 }
478 }
479
480 static void
schedule_cea608_s334_1a(GstCCCombiner * self,guint8 * data,guint len,GstClockTime pts,GstClockTime duration)481 schedule_cea608_s334_1a (GstCCCombiner * self, guint8 * data, guint len,
482 GstClockTime pts, GstClockTime duration)
483 {
484 guint8 field0_data[3], field1_data[3];
485 guint field0_len = 0, field1_len = 0;
486 guint i;
487 gboolean field0_608 = FALSE, field1_608 = FALSE;
488
489 if (len % 3 != 0) {
490 GST_WARNING ("Invalid cc_data buffer size %u. Truncating to a multiple "
491 "of 3", len);
492 len = len - (len % 3);
493 }
494
495 for (i = 0; i < len / 3; i++) {
496 if (data[i * 3] & 0x80) {
497 if (field0_608)
498 continue;
499
500 field0_608 = TRUE;
501
502 if (data[i * 3 + 1] == 0x80 && data[i * 3 + 2] == 0x80)
503 continue;
504
505 field0_data[field0_len++] = data[i * 3];
506 field0_data[field0_len++] = data[i * 3 + 1];
507 field0_data[field0_len++] = data[i * 3 + 2];
508 } else {
509 if (field1_608)
510 continue;
511
512 field1_608 = TRUE;
513
514 if (data[i * 3 + 1] == 0x80 && data[i * 3 + 2] == 0x80)
515 continue;
516
517 field1_data[field1_len++] = data[i * 3];
518 field1_data[field1_len++] = data[i * 3 + 1];
519 field1_data[field1_len++] = data[i * 3 + 2];
520 }
521 }
522
523 if (field0_len > 0) {
524 GstBuffer *buf = gst_buffer_new_allocate (NULL, field0_len, NULL);
525
526 gst_buffer_fill (buf, 0, field0_data, field0_len);
527 GST_BUFFER_PTS (buf) = pts;
528 GST_BUFFER_DURATION (buf) = duration;
529
530 queue_caption (self, buf, 0);
531 }
532
533 if (field1_len > 0) {
534 GstBuffer *buf = gst_buffer_new_allocate (NULL, field1_len, NULL);
535
536 gst_buffer_fill (buf, 0, field1_data, field1_len);
537 GST_BUFFER_PTS (buf) = pts;
538 GST_BUFFER_DURATION (buf) = duration;
539
540 queue_caption (self, buf, 1);
541 }
542 }
543
/* Split a raw cc_data buffer into a field-0 buffer (first CEA-608 field-1
 * triplet plus any CCP data) and a field-1 buffer (first CEA-608 field-2
 * triplet) and queue both. Padding triplets are discarded. */
static void
schedule_cea708_raw (GstCCCombiner * self, guint8 * data, guint len,
    GstClockTime pts, GstClockTime duration)
{
  guint8 field0_data[MAX_CDP_PACKET_LEN], field1_data[3];
  guint field0_len = 0, field1_len = 0;
  guint i;
  gboolean field0_608 = FALSE, field1_608 = FALSE;
  gboolean started_ccp = FALSE;

  if (len % 3 != 0) {
    GST_WARNING ("Invalid cc_data buffer size %u. Truncating to a multiple "
        "of 3", len);
    len = len - (len % 3);
  }

  for (i = 0; i < len / 3; i++) {
    gboolean cc_valid = (data[i * 3] & 0x04) == 0x04;
    guint8 cc_type = data[i * 3] & 0x03;

    if (!started_ccp) {
      /* CEA-608 field 1 (type 0x00): keep the first non-padding triplet */
      if (cc_type == 0x00) {
        if (!cc_valid)
          continue;

        if (field0_608)
          continue;

        field0_608 = TRUE;

        if (data[i * 3 + 1] == 0x80 && data[i * 3 + 2] == 0x80)
          continue;

        field0_data[field0_len++] = data[i * 3];
        field0_data[field0_len++] = data[i * 3 + 1];
        field0_data[field0_len++] = data[i * 3 + 2];
      } else if (cc_type == 0x01) {
        /* CEA-608 field 2 (type 0x01): keep the first non-padding triplet */
        if (!cc_valid)
          continue;

        if (field1_608)
          continue;

        field1_608 = TRUE;

        if (data[i * 3 + 1] == 0x80 && data[i * 3 + 2] == 0x80)
          continue;

        field1_data[field1_len++] = data[i * 3];
        field1_data[field1_len++] = data[i * 3 + 1];
        field1_data[field1_len++] = data[i * 3 + 2];
      }

      continue;
    }

    /* NOTE(review): cc_type is masked with 0x03 above, so this bit test can
     * never succeed — and the !started_ccp branch above always `continue`s,
     * so this statement is only reachable once started_ccp is already TRUE.
     * As written, started_ccp never becomes TRUE and the CCP-collection
     * code below appears unreachable. Presumably the unmasked byte's
     * packet-start bit was meant; confirm intent before changing. */
    if (cc_type & 0x10)
      started_ccp = TRUE;

    if (!cc_valid)
      continue;

    /* CEA-608 triplets after CCP data has started are ignored */
    if (cc_type == 0x00 || cc_type == 0x01)
      continue;

    field0_data[field0_len++] = data[i * 3];
    field0_data[field0_len++] = data[i * 3 + 1];
    field0_data[field0_len++] = data[i * 3 + 2];
  }

  if (field0_len > 0) {
    GstBuffer *buf = gst_buffer_new_allocate (NULL, field0_len, NULL);

    gst_buffer_fill (buf, 0, field0_data, field0_len);
    GST_BUFFER_PTS (buf) = pts;
    GST_BUFFER_DURATION (buf) = duration;

    queue_caption (self, buf, 0);
  }

  if (field1_len > 0) {
    GstBuffer *buf = gst_buffer_new_allocate (NULL, field1_len, NULL);

    gst_buffer_fill (buf, 0, field1_data, field1_len);
    GST_BUFFER_PTS (buf) = pts;
    GST_BUFFER_DURATION (buf) = duration;

    queue_caption (self, buf, 1);
  }
}
634
635 static void
schedule_cea608_raw(GstCCCombiner * self,guint8 * data,guint len,GstBuffer * buffer)636 schedule_cea608_raw (GstCCCombiner * self, guint8 * data, guint len,
637 GstBuffer * buffer)
638 {
639 if (len < 2) {
640 return;
641 }
642
643 if (data[0] != 0x80 || data[1] != 0x80) {
644 queue_caption (self, gst_buffer_ref (buffer), 0);
645 }
646 }
647
648
649 static void
schedule_caption(GstCCCombiner * self,GstBuffer * caption_buf,const GstVideoTimeCode * tc)650 schedule_caption (GstCCCombiner * self, GstBuffer * caption_buf,
651 const GstVideoTimeCode * tc)
652 {
653 GstMapInfo map;
654 GstClockTime pts, duration;
655
656 pts = GST_BUFFER_PTS (caption_buf);
657 duration = GST_BUFFER_DURATION (caption_buf);
658
659 gst_buffer_map (caption_buf, &map, GST_MAP_READ);
660
661 switch (self->caption_type) {
662 case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
663 schedule_cdp (self, tc, map.data, map.size, pts, duration);
664 break;
665 case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
666 schedule_cea708_raw (self, map.data, map.size, pts, duration);
667 break;
668 case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:
669 schedule_cea608_s334_1a (self, map.data, map.size, pts, duration);
670 break;
671 case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
672 schedule_cea608_raw (self, map.data, map.size, caption_buf);
673 break;
674 default:
675 break;
676 }
677
678 gst_buffer_unmap (caption_buf, &map);
679 }
680
681 static void
dequeue_caption_one_field(GstCCCombiner * self,const GstVideoTimeCode * tc,guint field,gboolean drain)682 dequeue_caption_one_field (GstCCCombiner * self, const GstVideoTimeCode * tc,
683 guint field, gboolean drain)
684 {
685 CaptionQueueItem *scheduled;
686 CaptionData caption_data;
687
688 if ((scheduled = gst_queue_array_pop_head_struct (self->scheduled[field]))) {
689 caption_data.buffer = scheduled->buffer;
690 caption_data.caption_type = self->caption_type;
691 g_array_append_val (self->current_frame_captions, caption_data);
692 } else if (!drain) {
693 caption_data.caption_type = self->caption_type;
694 caption_data.buffer = make_padding (self, tc, field);
695 g_array_append_val (self->current_frame_captions, caption_data);
696 }
697 }
698
699 static void
dequeue_caption_both_fields(GstCCCombiner * self,const GstVideoTimeCode * tc,gboolean drain)700 dequeue_caption_both_fields (GstCCCombiner * self, const GstVideoTimeCode * tc,
701 gboolean drain)
702 {
703 CaptionQueueItem *field0_scheduled, *field1_scheduled;
704 GstBuffer *field0_buffer, *field1_buffer;
705 CaptionData caption_data;
706
707 field0_scheduled = gst_queue_array_pop_head_struct (self->scheduled[0]);
708 field1_scheduled = gst_queue_array_pop_head_struct (self->scheduled[1]);
709
710 if (drain && !field0_scheduled && !field1_scheduled) {
711 return;
712 }
713
714 if (field0_scheduled) {
715 field0_buffer = field0_scheduled->buffer;
716 } else {
717 field0_buffer = make_padding (self, tc, 0);
718 }
719
720 if (field1_scheduled) {
721 field1_buffer = field1_scheduled->buffer;
722 } else {
723 field1_buffer = make_padding (self, tc, 1);
724 }
725
726 caption_data.caption_type = self->caption_type;
727 caption_data.buffer = gst_buffer_append (field0_buffer, field1_buffer);
728
729 g_array_append_val (self->current_frame_captions, caption_data);
730 }
731
732 static GstFlowReturn
gst_cc_combiner_collect_captions(GstCCCombiner * self,gboolean timeout)733 gst_cc_combiner_collect_captions (GstCCCombiner * self, gboolean timeout)
734 {
735 GstAggregatorPad *src_pad =
736 GST_AGGREGATOR_PAD (GST_AGGREGATOR_SRC_PAD (self));
737 GstAggregatorPad *caption_pad;
738 GstBuffer *video_buf;
739 GstVideoTimeCodeMeta *tc_meta;
740 GstVideoTimeCode *tc = NULL;
741 gboolean caption_pad_is_eos = FALSE;
742
743 g_assert (self->current_video_buffer != NULL);
744
745 caption_pad =
746 GST_AGGREGATOR_PAD_CAST (gst_element_get_static_pad (GST_ELEMENT_CAST
747 (self), "caption"));
748 /* No caption pad, forward buffer directly */
749 if (!caption_pad) {
750 GST_LOG_OBJECT (self, "No caption pad, passing through video");
751 video_buf = self->current_video_buffer;
752 gst_aggregator_selected_samples (GST_AGGREGATOR_CAST (self),
753 GST_BUFFER_PTS (video_buf), GST_BUFFER_DTS (video_buf),
754 GST_BUFFER_DURATION (video_buf), NULL);
755 self->current_video_buffer = NULL;
756 goto done;
757 }
758
759 tc_meta = gst_buffer_get_video_time_code_meta (self->current_video_buffer);
760
761 if (tc_meta) {
762 tc = &tc_meta->tc;
763 }
764
765 GST_LOG_OBJECT (self, "Trying to collect captions for queued video buffer");
766 do {
767 GstBuffer *caption_buf;
768 GstClockTime caption_time;
769 CaptionData caption_data;
770
771 caption_buf = gst_aggregator_pad_peek_buffer (caption_pad);
772 if (!caption_buf) {
773 if (gst_aggregator_pad_is_eos (caption_pad)) {
774 GST_DEBUG_OBJECT (self, "Caption pad is EOS, we're done");
775
776 caption_pad_is_eos = TRUE;
777 break;
778 } else if (!timeout) {
779 GST_DEBUG_OBJECT (self, "Need more caption data");
780 gst_object_unref (caption_pad);
781 return GST_FLOW_NEED_DATA;
782 } else {
783 GST_DEBUG_OBJECT (self, "No caption data on timeout");
784 break;
785 }
786 }
787
788 caption_time = GST_BUFFER_PTS (caption_buf);
789 if (!GST_CLOCK_TIME_IS_VALID (caption_time)) {
790 GST_ERROR_OBJECT (self, "Caption buffer without PTS");
791
792 gst_buffer_unref (caption_buf);
793 gst_object_unref (caption_pad);
794
795 return GST_FLOW_ERROR;
796 }
797
798 caption_time =
799 gst_segment_to_running_time (&caption_pad->segment, GST_FORMAT_TIME,
800 caption_time);
801
802 if (!GST_CLOCK_TIME_IS_VALID (caption_time)) {
803 GST_DEBUG_OBJECT (self, "Caption buffer outside segment, dropping");
804
805 gst_aggregator_pad_drop_buffer (caption_pad);
806 gst_buffer_unref (caption_buf);
807
808 continue;
809 }
810
811 if (gst_buffer_get_size (caption_buf) == 0 &&
812 GST_BUFFER_FLAG_IS_SET (caption_buf, GST_BUFFER_FLAG_GAP)) {
813 /* This is a gap, we can go ahead. We only consume it once its end point
814 * is behind the current video running time. Important to note that
815 * we can't deal with gaps with no duration (-1)
816 */
817 if (!GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (caption_buf))) {
818 GST_ERROR_OBJECT (self, "GAP buffer without a duration");
819
820 gst_buffer_unref (caption_buf);
821 gst_object_unref (caption_pad);
822
823 return GST_FLOW_ERROR;
824 }
825
826 gst_buffer_unref (caption_buf);
827
828 if (caption_time + GST_BUFFER_DURATION (caption_buf) <
829 self->current_video_running_time_end) {
830 gst_aggregator_pad_drop_buffer (caption_pad);
831 continue;
832 } else {
833 break;
834 }
835 }
836
837 /* Collected all caption buffers for this video buffer */
838 if (caption_time >= self->current_video_running_time_end) {
839 gst_buffer_unref (caption_buf);
840 break;
841 } else if (!self->schedule) {
842 if (GST_CLOCK_TIME_IS_VALID (self->previous_video_running_time_end)) {
843 if (caption_time < self->previous_video_running_time_end) {
844 GST_WARNING_OBJECT (self,
845 "Caption buffer before end of last video frame, dropping");
846
847 gst_aggregator_pad_drop_buffer (caption_pad);
848 gst_buffer_unref (caption_buf);
849 continue;
850 }
851 } else if (caption_time < self->current_video_running_time) {
852 GST_WARNING_OBJECT (self,
853 "Caption buffer before current video frame, dropping");
854
855 gst_aggregator_pad_drop_buffer (caption_pad);
856 gst_buffer_unref (caption_buf);
857 continue;
858 }
859 }
860
861 /* This caption buffer has to be collected */
862 GST_LOG_OBJECT (self,
863 "Collecting caption buffer %p %" GST_TIME_FORMAT " for video buffer %p",
864 caption_buf, GST_TIME_ARGS (caption_time), self->current_video_buffer);
865
866 caption_data.caption_type = self->caption_type;
867
868 gst_aggregator_pad_drop_buffer (caption_pad);
869
870 if (!self->schedule) {
871 caption_data.buffer = caption_buf;
872 g_array_append_val (self->current_frame_captions, caption_data);
873 } else {
874 schedule_caption (self, caption_buf, tc);
875 gst_buffer_unref (caption_buf);
876 }
877 } while (TRUE);
878
879 /* FIXME pad correctly according to fps */
880 if (self->schedule) {
881 g_assert (self->current_frame_captions->len == 0);
882
883 switch (self->caption_type) {
884 case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:
885 {
886 /* Only relevant in alternate and mixed mode, no need to look at the caps */
887 if (GST_BUFFER_FLAG_IS_SET (self->current_video_buffer,
888 GST_VIDEO_BUFFER_FLAG_INTERLACED)) {
889 if (!GST_VIDEO_BUFFER_IS_BOTTOM_FIELD (self->current_video_buffer)) {
890 dequeue_caption_one_field (self, tc, 0, caption_pad_is_eos);
891 }
892 } else {
893 dequeue_caption_one_field (self, tc, 0, caption_pad_is_eos);
894 }
895 break;
896 }
897 case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:
898 case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:
899 {
900 if (self->progressive) {
901 dequeue_caption_one_field (self, tc, 0, caption_pad_is_eos);
902 } else if (GST_BUFFER_FLAG_IS_SET (self->current_video_buffer,
903 GST_VIDEO_BUFFER_FLAG_INTERLACED) &&
904 GST_BUFFER_FLAG_IS_SET (self->current_video_buffer,
905 GST_VIDEO_BUFFER_FLAG_ONEFIELD)) {
906 if (GST_VIDEO_BUFFER_IS_TOP_FIELD (self->current_video_buffer)) {
907 dequeue_caption_one_field (self, tc, 0, caption_pad_is_eos);
908 } else {
909 dequeue_caption_one_field (self, tc, 1, caption_pad_is_eos);
910 }
911 } else {
912 dequeue_caption_both_fields (self, tc, caption_pad_is_eos);
913 }
914 break;
915 }
916 case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:
917 {
918 if (self->progressive) {
919 dequeue_caption_one_field (self, tc, 0, caption_pad_is_eos);
920 } else if (GST_BUFFER_FLAG_IS_SET (self->current_video_buffer,
921 GST_VIDEO_BUFFER_FLAG_INTERLACED)) {
922 if (!GST_VIDEO_BUFFER_IS_BOTTOM_FIELD (self->current_video_buffer)) {
923 dequeue_caption_one_field (self, tc, 0, caption_pad_is_eos);
924 }
925 } else {
926 dequeue_caption_one_field (self, tc, 0, caption_pad_is_eos);
927 }
928 break;
929 }
930 default:
931 break;
932 }
933 }
934
935 gst_aggregator_selected_samples (GST_AGGREGATOR_CAST (self),
936 GST_BUFFER_PTS (self->current_video_buffer),
937 GST_BUFFER_DTS (self->current_video_buffer),
938 GST_BUFFER_DURATION (self->current_video_buffer), NULL);
939
940 GST_LOG_OBJECT (self, "Attaching %u captions to buffer %p",
941 self->current_frame_captions->len, self->current_video_buffer);
942
943 if (self->current_frame_captions->len > 0) {
944 guint i;
945
946 video_buf = gst_buffer_make_writable (self->current_video_buffer);
947 self->current_video_buffer = NULL;
948
949 for (i = 0; i < self->current_frame_captions->len; i++) {
950 CaptionData *caption_data =
951 &g_array_index (self->current_frame_captions, CaptionData, i);
952 GstMapInfo map;
953
954 gst_buffer_map (caption_data->buffer, &map, GST_MAP_READ);
955 gst_buffer_add_video_caption_meta (video_buf, caption_data->caption_type,
956 map.data, map.size);
957 gst_buffer_unmap (caption_data->buffer, &map);
958 }
959
960 g_array_set_size (self->current_frame_captions, 0);
961 } else {
962 GST_LOG_OBJECT (self, "No captions for buffer %p",
963 self->current_video_buffer);
964 video_buf = self->current_video_buffer;
965 self->current_video_buffer = NULL;
966 }
967
968 gst_object_unref (caption_pad);
969
970 done:
971 src_pad->segment.position =
972 GST_BUFFER_PTS (video_buf) + GST_BUFFER_DURATION (video_buf);
973
974 return gst_aggregator_finish_buffer (GST_AGGREGATOR_CAST (self), video_buf);
975 }
976
977 static GstFlowReturn
gst_cc_combiner_aggregate(GstAggregator * aggregator,gboolean timeout)978 gst_cc_combiner_aggregate (GstAggregator * aggregator, gboolean timeout)
979 {
980 GstCCCombiner *self = GST_CCCOMBINER (aggregator);
981 GstFlowReturn flow_ret = GST_FLOW_OK;
982
983 /* If we have no current video buffer, queue one. If we have one but
984 * its end running time is not known yet, try to determine it from the
985 * next video buffer */
986 if (!self->current_video_buffer
987 || !GST_CLOCK_TIME_IS_VALID (self->current_video_running_time_end)) {
988 GstAggregatorPad *video_pad;
989 GstClockTime video_start;
990 GstBuffer *video_buf;
991
992 video_pad =
993 GST_AGGREGATOR_PAD_CAST (gst_element_get_static_pad (GST_ELEMENT_CAST
994 (aggregator), "sink"));
995 video_buf = gst_aggregator_pad_peek_buffer (video_pad);
996 if (!video_buf) {
997 if (gst_aggregator_pad_is_eos (video_pad)) {
998 GST_DEBUG_OBJECT (aggregator, "Video pad is EOS, we're done");
999
1000 /* Assume that this buffer ends where it started +50ms (25fps) and handle it */
1001 if (self->current_video_buffer) {
1002 self->current_video_running_time_end =
1003 self->current_video_running_time + 50 * GST_MSECOND;
1004 flow_ret = gst_cc_combiner_collect_captions (self, timeout);
1005 }
1006
1007 /* If we collected all captions for the remaining video frame we're
1008 * done, otherwise get called another time and go directly into the
1009 * outer branch for finishing the current video frame */
1010 if (flow_ret == GST_FLOW_NEED_DATA)
1011 flow_ret = GST_FLOW_OK;
1012 else
1013 flow_ret = GST_FLOW_EOS;
1014 } else {
1015 flow_ret = GST_FLOW_OK;
1016 }
1017
1018 gst_object_unref (video_pad);
1019 return flow_ret;
1020 }
1021
1022 video_start = GST_BUFFER_PTS (video_buf);
1023 if (!GST_CLOCK_TIME_IS_VALID (video_start)) {
1024 gst_buffer_unref (video_buf);
1025 gst_object_unref (video_pad);
1026
1027 GST_ERROR_OBJECT (aggregator, "Video buffer without PTS");
1028
1029 return GST_FLOW_ERROR;
1030 }
1031
1032 video_start =
1033 gst_segment_to_running_time (&video_pad->segment, GST_FORMAT_TIME,
1034 video_start);
1035 if (!GST_CLOCK_TIME_IS_VALID (video_start)) {
1036 GST_DEBUG_OBJECT (aggregator, "Buffer outside segment, dropping");
1037 gst_aggregator_pad_drop_buffer (video_pad);
1038 gst_buffer_unref (video_buf);
1039 gst_object_unref (video_pad);
1040 return GST_FLOW_OK;
1041 }
1042
1043 if (self->current_video_buffer) {
1044 /* If we already have a video buffer just update the current end running
1045 * time accordingly. That's what was missing and why we got here */
1046 self->current_video_running_time_end = video_start;
1047 gst_buffer_unref (video_buf);
1048 GST_LOG_OBJECT (self,
1049 "Determined end timestamp for video buffer: %p %" GST_TIME_FORMAT
1050 " - %" GST_TIME_FORMAT, self->current_video_buffer,
1051 GST_TIME_ARGS (self->current_video_running_time),
1052 GST_TIME_ARGS (self->current_video_running_time_end));
1053 } else {
1054 /* Otherwise we had no buffer queued currently. Let's do that now
1055 * so that we can collect captions for it */
1056 gst_buffer_replace (&self->current_video_buffer, video_buf);
1057 self->current_video_running_time = video_start;
1058 gst_aggregator_pad_drop_buffer (video_pad);
1059 gst_buffer_unref (video_buf);
1060
1061 if (GST_BUFFER_DURATION_IS_VALID (video_buf)) {
1062 GstClockTime end_time =
1063 GST_BUFFER_PTS (video_buf) + GST_BUFFER_DURATION (video_buf);
1064 if (video_pad->segment.stop != -1 && end_time > video_pad->segment.stop)
1065 end_time = video_pad->segment.stop;
1066 self->current_video_running_time_end =
1067 gst_segment_to_running_time (&video_pad->segment, GST_FORMAT_TIME,
1068 end_time);
1069 } else if (self->video_fps_n != 0 && self->video_fps_d != 0) {
1070 GstClockTime end_time =
1071 GST_BUFFER_PTS (video_buf) + gst_util_uint64_scale_int (GST_SECOND,
1072 self->video_fps_d, self->video_fps_n);
1073 if (video_pad->segment.stop != -1 && end_time > video_pad->segment.stop)
1074 end_time = video_pad->segment.stop;
1075 self->current_video_running_time_end =
1076 gst_segment_to_running_time (&video_pad->segment, GST_FORMAT_TIME,
1077 end_time);
1078 } else {
1079 self->current_video_running_time_end = GST_CLOCK_TIME_NONE;
1080 }
1081
1082 GST_LOG_OBJECT (self,
1083 "Queued new video buffer: %p %" GST_TIME_FORMAT " - %"
1084 GST_TIME_FORMAT, self->current_video_buffer,
1085 GST_TIME_ARGS (self->current_video_running_time),
1086 GST_TIME_ARGS (self->current_video_running_time_end));
1087 }
1088
1089 gst_object_unref (video_pad);
1090 }
1091
1092 /* At this point we have a video buffer queued and can start collecting
1093 * caption buffers for it */
1094 g_assert (self->current_video_buffer != NULL);
1095 g_assert (GST_CLOCK_TIME_IS_VALID (self->current_video_running_time));
1096 g_assert (GST_CLOCK_TIME_IS_VALID (self->current_video_running_time_end));
1097
1098 flow_ret = gst_cc_combiner_collect_captions (self, timeout);
1099
1100 /* Only if we collected all captions we replace the current video buffer
1101 * with NULL and continue with the next one on the next call */
1102 if (flow_ret == GST_FLOW_NEED_DATA) {
1103 flow_ret = GST_FLOW_OK;
1104 } else {
1105 gst_buffer_replace (&self->current_video_buffer, NULL);
1106 self->previous_video_running_time_end =
1107 self->current_video_running_time_end;
1108 self->current_video_running_time = self->current_video_running_time_end =
1109 GST_CLOCK_TIME_NONE;
1110 }
1111
1112 return flow_ret;
1113 }
1114
1115 static gboolean
gst_cc_combiner_sink_event(GstAggregator * aggregator,GstAggregatorPad * agg_pad,GstEvent * event)1116 gst_cc_combiner_sink_event (GstAggregator * aggregator,
1117 GstAggregatorPad * agg_pad, GstEvent * event)
1118 {
1119 GstCCCombiner *self = GST_CCCOMBINER (aggregator);
1120
1121 switch (GST_EVENT_TYPE (event)) {
1122 case GST_EVENT_CAPS:{
1123 GstCaps *caps;
1124 GstStructure *s;
1125
1126 gst_event_parse_caps (event, &caps);
1127 s = gst_caps_get_structure (caps, 0);
1128
1129 if (strcmp (GST_OBJECT_NAME (agg_pad), "caption") == 0) {
1130 GstVideoCaptionType caption_type =
1131 gst_video_caption_type_from_caps (caps);
1132
1133 if (self->caption_type != GST_VIDEO_CAPTION_TYPE_UNKNOWN &&
1134 caption_type != self->caption_type) {
1135 GST_ERROR_OBJECT (self, "Changing caption type is not allowed");
1136
1137 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1138 ("Changing caption type is not allowed"));
1139
1140 return FALSE;
1141 }
1142 self->caption_type = caption_type;
1143 } else {
1144 gint fps_n, fps_d;
1145 const gchar *interlace_mode;
1146
1147 fps_n = fps_d = 0;
1148
1149 gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d);
1150
1151 interlace_mode = gst_structure_get_string (s, "interlace-mode");
1152
1153 self->progressive = !interlace_mode
1154 || !g_strcmp0 (interlace_mode, "progressive");
1155
1156 if (fps_n != self->video_fps_n || fps_d != self->video_fps_d) {
1157 GstClockTime latency;
1158
1159 latency = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
1160 gst_aggregator_set_latency (aggregator, latency, latency);
1161 }
1162
1163 self->video_fps_n = fps_n;
1164 self->video_fps_d = fps_d;
1165
1166 self->cdp_fps_entry = cdp_fps_entry_from_fps (fps_n, fps_d);
1167
1168 gst_aggregator_set_src_caps (aggregator, caps);
1169 }
1170
1171 break;
1172 }
1173 case GST_EVENT_SEGMENT:{
1174 if (strcmp (GST_OBJECT_NAME (agg_pad), "sink") == 0) {
1175 const GstSegment *segment;
1176
1177 gst_event_parse_segment (event, &segment);
1178 gst_aggregator_update_segment (aggregator, segment);
1179 }
1180 break;
1181 }
1182 default:
1183 break;
1184 }
1185
1186 return GST_AGGREGATOR_CLASS (parent_class)->sink_event (aggregator, agg_pad,
1187 event);
1188 }
1189
1190 static gboolean
gst_cc_combiner_stop(GstAggregator * aggregator)1191 gst_cc_combiner_stop (GstAggregator * aggregator)
1192 {
1193 GstCCCombiner *self = GST_CCCOMBINER (aggregator);
1194
1195 self->video_fps_n = self->video_fps_d = 0;
1196 self->current_video_running_time = self->current_video_running_time_end =
1197 self->previous_video_running_time_end = GST_CLOCK_TIME_NONE;
1198 gst_buffer_replace (&self->current_video_buffer, NULL);
1199
1200 g_array_set_size (self->current_frame_captions, 0);
1201 self->caption_type = GST_VIDEO_CAPTION_TYPE_UNKNOWN;
1202
1203 gst_queue_array_clear (self->scheduled[0]);
1204 gst_queue_array_clear (self->scheduled[1]);
1205 self->cdp_fps_entry = &null_fps_entry;
1206
1207 return TRUE;
1208 }
1209
1210 static GstFlowReturn
gst_cc_combiner_flush(GstAggregator * aggregator)1211 gst_cc_combiner_flush (GstAggregator * aggregator)
1212 {
1213 GstCCCombiner *self = GST_CCCOMBINER (aggregator);
1214 GstAggregatorPad *src_pad =
1215 GST_AGGREGATOR_PAD (GST_AGGREGATOR_SRC_PAD (aggregator));
1216
1217 self->current_video_running_time = self->current_video_running_time_end =
1218 self->previous_video_running_time_end = GST_CLOCK_TIME_NONE;
1219 gst_buffer_replace (&self->current_video_buffer, NULL);
1220
1221 g_array_set_size (self->current_frame_captions, 0);
1222
1223 src_pad->segment.position = GST_CLOCK_TIME_NONE;
1224
1225 self->cdp_hdr_sequence_cntr = 0;
1226 gst_queue_array_clear (self->scheduled[0]);
1227 gst_queue_array_clear (self->scheduled[1]);
1228
1229 return GST_FLOW_OK;
1230 }
1231
1232 static GstAggregatorPad *
gst_cc_combiner_create_new_pad(GstAggregator * aggregator,GstPadTemplate * templ,const gchar * req_name,const GstCaps * caps)1233 gst_cc_combiner_create_new_pad (GstAggregator * aggregator,
1234 GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
1235 {
1236 GstCCCombiner *self = GST_CCCOMBINER (aggregator);
1237 GstAggregatorPad *agg_pad;
1238
1239 if (templ->direction != GST_PAD_SINK)
1240 return NULL;
1241
1242 if (templ->presence != GST_PAD_REQUEST)
1243 return NULL;
1244
1245 if (strcmp (templ->name_template, "caption") != 0)
1246 return NULL;
1247
1248 GST_OBJECT_LOCK (self);
1249 agg_pad = g_object_new (GST_TYPE_AGGREGATOR_PAD,
1250 "name", "caption", "direction", GST_PAD_SINK, "template", templ, NULL);
1251 self->caption_type = GST_VIDEO_CAPTION_TYPE_UNKNOWN;
1252 GST_OBJECT_UNLOCK (self);
1253
1254 return agg_pad;
1255 }
1256
1257 static gboolean
gst_cc_combiner_src_query(GstAggregator * aggregator,GstQuery * query)1258 gst_cc_combiner_src_query (GstAggregator * aggregator, GstQuery * query)
1259 {
1260 GstPad *video_sinkpad =
1261 gst_element_get_static_pad (GST_ELEMENT_CAST (aggregator), "sink");
1262 gboolean ret;
1263
1264 switch (GST_QUERY_TYPE (query)) {
1265 case GST_QUERY_POSITION:
1266 case GST_QUERY_DURATION:
1267 case GST_QUERY_URI:
1268 case GST_QUERY_CAPS:
1269 case GST_QUERY_ALLOCATION:
1270 ret = gst_pad_peer_query (video_sinkpad, query);
1271 break;
1272 case GST_QUERY_ACCEPT_CAPS:{
1273 GstCaps *caps;
1274 GstCaps *templ = gst_static_pad_template_get_caps (&srctemplate);
1275
1276 gst_query_parse_accept_caps (query, &caps);
1277 gst_query_set_accept_caps_result (query, gst_caps_is_subset (caps,
1278 templ));
1279 gst_caps_unref (templ);
1280 ret = TRUE;
1281 break;
1282 }
1283 default:
1284 ret = GST_AGGREGATOR_CLASS (parent_class)->src_query (aggregator, query);
1285 break;
1286 }
1287
1288 gst_object_unref (video_sinkpad);
1289
1290 return ret;
1291 }
1292
1293 static gboolean
gst_cc_combiner_sink_query(GstAggregator * aggregator,GstAggregatorPad * aggpad,GstQuery * query)1294 gst_cc_combiner_sink_query (GstAggregator * aggregator,
1295 GstAggregatorPad * aggpad, GstQuery * query)
1296 {
1297 GstPad *video_sinkpad =
1298 gst_element_get_static_pad (GST_ELEMENT_CAST (aggregator), "sink");
1299 GstPad *srcpad = GST_AGGREGATOR_SRC_PAD (aggregator);
1300
1301 gboolean ret;
1302
1303 switch (GST_QUERY_TYPE (query)) {
1304 case GST_QUERY_POSITION:
1305 case GST_QUERY_DURATION:
1306 case GST_QUERY_URI:
1307 case GST_QUERY_ALLOCATION:
1308 if (GST_PAD_CAST (aggpad) == video_sinkpad) {
1309 ret = gst_pad_peer_query (srcpad, query);
1310 } else {
1311 ret =
1312 GST_AGGREGATOR_CLASS (parent_class)->sink_query (aggregator,
1313 aggpad, query);
1314 }
1315 break;
1316 case GST_QUERY_CAPS:
1317 if (GST_PAD_CAST (aggpad) == video_sinkpad) {
1318 ret = gst_pad_peer_query (srcpad, query);
1319 } else {
1320 GstCaps *filter;
1321 GstCaps *templ = gst_static_pad_template_get_caps (&captiontemplate);
1322
1323 gst_query_parse_caps (query, &filter);
1324
1325 if (filter) {
1326 GstCaps *caps =
1327 gst_caps_intersect_full (filter, templ, GST_CAPS_INTERSECT_FIRST);
1328 gst_query_set_caps_result (query, caps);
1329 gst_caps_unref (caps);
1330 } else {
1331 gst_query_set_caps_result (query, templ);
1332 }
1333 gst_caps_unref (templ);
1334 ret = TRUE;
1335 }
1336 break;
1337 case GST_QUERY_ACCEPT_CAPS:
1338 if (GST_PAD_CAST (aggpad) == video_sinkpad) {
1339 ret = gst_pad_peer_query (srcpad, query);
1340 } else {
1341 GstCaps *caps;
1342 GstCaps *templ = gst_static_pad_template_get_caps (&captiontemplate);
1343
1344 gst_query_parse_accept_caps (query, &caps);
1345 gst_query_set_accept_caps_result (query, gst_caps_is_subset (caps,
1346 templ));
1347 gst_caps_unref (templ);
1348 ret = TRUE;
1349 }
1350 break;
1351 default:
1352 ret = GST_AGGREGATOR_CLASS (parent_class)->sink_query (aggregator,
1353 aggpad, query);
1354 break;
1355 }
1356
1357 gst_object_unref (video_sinkpad);
1358
1359 return ret;
1360 }
1361
1362 static GstSample *
gst_cc_combiner_peek_next_sample(GstAggregator * agg,GstAggregatorPad * aggpad)1363 gst_cc_combiner_peek_next_sample (GstAggregator * agg,
1364 GstAggregatorPad * aggpad)
1365 {
1366 GstAggregatorPad *caption_pad, *video_pad;
1367 GstCCCombiner *self = GST_CCCOMBINER (agg);
1368 GstSample *res = NULL;
1369
1370 caption_pad =
1371 GST_AGGREGATOR_PAD_CAST (gst_element_get_static_pad (GST_ELEMENT_CAST
1372 (self), "caption"));
1373 video_pad =
1374 GST_AGGREGATOR_PAD_CAST (gst_element_get_static_pad (GST_ELEMENT_CAST
1375 (self), "sink"));
1376
1377 if (aggpad == caption_pad) {
1378 if (self->current_frame_captions->len > 0) {
1379 GstCaps *caps = gst_pad_get_current_caps (GST_PAD (aggpad));
1380 GstBufferList *buflist = gst_buffer_list_new ();
1381 guint i;
1382
1383 for (i = 0; i < self->current_frame_captions->len; i++) {
1384 CaptionData *caption_data =
1385 &g_array_index (self->current_frame_captions, CaptionData, i);
1386 gst_buffer_list_add (buflist, gst_buffer_ref (caption_data->buffer));
1387 }
1388
1389 res = gst_sample_new (NULL, caps, &aggpad->segment, NULL);
1390 gst_caps_unref (caps);
1391
1392 gst_sample_set_buffer_list (res, buflist);
1393 gst_buffer_list_unref (buflist);
1394 }
1395 } else if (aggpad == video_pad) {
1396 if (self->current_video_buffer) {
1397 GstCaps *caps = gst_pad_get_current_caps (GST_PAD (aggpad));
1398 res = gst_sample_new (self->current_video_buffer,
1399 caps, &aggpad->segment, NULL);
1400 gst_caps_unref (caps);
1401 }
1402 }
1403
1404 if (caption_pad)
1405 gst_object_unref (caption_pad);
1406
1407 if (video_pad)
1408 gst_object_unref (video_pad);
1409
1410 return res;
1411 }
1412
1413 static GstStateChangeReturn
gst_cc_combiner_change_state(GstElement * element,GstStateChange transition)1414 gst_cc_combiner_change_state (GstElement * element, GstStateChange transition)
1415 {
1416 GstCCCombiner *self = GST_CCCOMBINER (element);
1417
1418 switch (transition) {
1419 case GST_STATE_CHANGE_READY_TO_PAUSED:
1420 self->schedule = self->prop_schedule;
1421 self->max_scheduled = self->prop_max_scheduled;
1422 break;
1423 default:
1424 break;
1425 }
1426
1427 return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1428 }
1429
1430 static void
gst_cc_combiner_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)1431 gst_cc_combiner_set_property (GObject * object, guint prop_id,
1432 const GValue * value, GParamSpec * pspec)
1433 {
1434 GstCCCombiner *self = GST_CCCOMBINER (object);
1435
1436 switch (prop_id) {
1437 case PROP_SCHEDULE:
1438 self->prop_schedule = g_value_get_boolean (value);
1439 break;
1440 case PROP_MAX_SCHEDULED:
1441 self->prop_max_scheduled = g_value_get_uint (value);
1442 break;
1443 default:
1444 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1445 break;
1446 }
1447 }
1448
1449 static void
gst_cc_combiner_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)1450 gst_cc_combiner_get_property (GObject * object, guint prop_id, GValue * value,
1451 GParamSpec * pspec)
1452 {
1453 GstCCCombiner *self = GST_CCCOMBINER (object);
1454
1455 switch (prop_id) {
1456 case PROP_SCHEDULE:
1457 g_value_set_boolean (value, self->prop_schedule);
1458 break;
1459 case PROP_MAX_SCHEDULED:
1460 g_value_set_uint (value, self->prop_max_scheduled);
1461 break;
1462 default:
1463 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1464 break;
1465 }
1466 }
1467
1468 static void
gst_cc_combiner_class_init(GstCCCombinerClass * klass)1469 gst_cc_combiner_class_init (GstCCCombinerClass * klass)
1470 {
1471 GObjectClass *gobject_class;
1472 GstElementClass *gstelement_class;
1473 GstAggregatorClass *aggregator_class;
1474
1475 gobject_class = (GObjectClass *) klass;
1476 gstelement_class = (GstElementClass *) klass;
1477 aggregator_class = (GstAggregatorClass *) klass;
1478
1479 gobject_class->finalize = gst_cc_combiner_finalize;
1480 gobject_class->set_property = gst_cc_combiner_set_property;
1481 gobject_class->get_property = gst_cc_combiner_get_property;
1482
1483 gst_element_class_set_static_metadata (gstelement_class,
1484 "Closed Caption Combiner",
1485 "Filter",
1486 "Combines GstVideoCaptionMeta with video input stream",
1487 "Sebastian Dröge <sebastian@centricular.com>");
1488
1489 /**
1490 * GstCCCombiner:schedule:
1491 *
1492 * Controls whether caption buffers should be smoothly scheduled
1493 * in order to have exactly one per output video buffer.
1494 *
1495 * This can involve rewriting input captions, for example when the
1496 * input is CDP sequence counters are rewritten, time codes are dropped
1497 * and potentially re-injected if the input video frame had a time code
1498 * meta.
1499 *
1500 * Caption buffers may also get split up in order to assign captions to
1501 * the correct field when the input is interlaced.
1502 *
1503 * This can also imply that the input will drift from synchronization,
1504 * when there isn't enough padding in the input stream to catch up. In
1505 * that case the element will start dropping old caption buffers once
1506 * the number of buffers in its internal queue reaches
1507 * #GstCCCombiner:max-scheduled.
1508 *
1509 * When this is set to %FALSE, the behaviour of this element is essentially
1510 * that of a funnel.
1511 *
1512 * Since: 1.20
1513 */
1514 g_object_class_install_property (G_OBJECT_CLASS (klass),
1515 PROP_SCHEDULE, g_param_spec_boolean ("schedule",
1516 "Schedule",
1517 "Schedule caption buffers so that exactly one is output per video frame",
1518 DEFAULT_SCHEDULE,
1519 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
1520 GST_PARAM_MUTABLE_READY));
1521
1522 /**
1523 * GstCCCombiner:max-scheduled:
1524 *
1525 * Controls the number of scheduled buffers after which the element
1526 * will start dropping old buffers from its internal queues. See
1527 * #GstCCCombiner:schedule.
1528 *
1529 * Since: 1.20
1530 */
1531 g_object_class_install_property (G_OBJECT_CLASS (klass),
1532 PROP_MAX_SCHEDULED, g_param_spec_uint ("max-scheduled",
1533 "Max Scheduled",
1534 "Maximum number of buffers to queue for scheduling", 0, G_MAXUINT,
1535 DEFAULT_MAX_SCHEDULED,
1536 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
1537 GST_PARAM_MUTABLE_READY));
1538
1539 gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
1540 &sinktemplate, GST_TYPE_AGGREGATOR_PAD);
1541 gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
1542 &srctemplate, GST_TYPE_AGGREGATOR_PAD);
1543 gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
1544 &captiontemplate, GST_TYPE_AGGREGATOR_PAD);
1545
1546 gstelement_class->change_state =
1547 GST_DEBUG_FUNCPTR (gst_cc_combiner_change_state);
1548
1549 aggregator_class->aggregate = gst_cc_combiner_aggregate;
1550 aggregator_class->stop = gst_cc_combiner_stop;
1551 aggregator_class->flush = gst_cc_combiner_flush;
1552 aggregator_class->create_new_pad = gst_cc_combiner_create_new_pad;
1553 aggregator_class->sink_event = gst_cc_combiner_sink_event;
1554 aggregator_class->negotiate = NULL;
1555 aggregator_class->get_next_time = gst_aggregator_simple_get_next_time;
1556 aggregator_class->src_query = gst_cc_combiner_src_query;
1557 aggregator_class->sink_query = gst_cc_combiner_sink_query;
1558 aggregator_class->peek_next_sample = gst_cc_combiner_peek_next_sample;
1559
1560 GST_DEBUG_CATEGORY_INIT (gst_cc_combiner_debug, "cccombiner",
1561 0, "Closed Caption combiner");
1562 }
1563
1564 static void
gst_cc_combiner_init(GstCCCombiner * self)1565 gst_cc_combiner_init (GstCCCombiner * self)
1566 {
1567 GstPadTemplate *templ;
1568 GstAggregatorPad *agg_pad;
1569
1570 templ = gst_static_pad_template_get (&sinktemplate);
1571 agg_pad = g_object_new (GST_TYPE_AGGREGATOR_PAD,
1572 "name", "sink", "direction", GST_PAD_SINK, "template", templ, NULL);
1573 gst_object_unref (templ);
1574 gst_element_add_pad (GST_ELEMENT_CAST (self), GST_PAD_CAST (agg_pad));
1575
1576 self->current_frame_captions =
1577 g_array_new (FALSE, FALSE, sizeof (CaptionData));
1578 g_array_set_clear_func (self->current_frame_captions,
1579 (GDestroyNotify) caption_data_clear);
1580
1581 self->current_video_running_time = self->current_video_running_time_end =
1582 self->previous_video_running_time_end = GST_CLOCK_TIME_NONE;
1583
1584 self->caption_type = GST_VIDEO_CAPTION_TYPE_UNKNOWN;
1585
1586 self->prop_schedule = DEFAULT_SCHEDULE;
1587 self->prop_max_scheduled = DEFAULT_MAX_SCHEDULED;
1588 self->scheduled[0] =
1589 gst_queue_array_new_for_struct (sizeof (CaptionQueueItem), 0);
1590 self->scheduled[1] =
1591 gst_queue_array_new_for_struct (sizeof (CaptionQueueItem), 0);
1592 gst_queue_array_set_clear_func (self->scheduled[0],
1593 (GDestroyNotify) clear_scheduled);
1594 gst_queue_array_set_clear_func (self->scheduled[1],
1595 (GDestroyNotify) clear_scheduled);
1596 self->cdp_hdr_sequence_cntr = 0;
1597 self->cdp_fps_entry = &null_fps_entry;
1598 }
1599