1 /* GStreamer
2 * Copyright (C) 2015 Samsung Electronics Co., Ltd.
3 * @Author: Chengjun Wang <cjun.wang@samsung.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21 #ifdef HAVE_CONFIG_H
22 #include <config.h>
23 #endif
24
25 #include <gst/video/video.h>
26 #include <gst/video/gstvideometa.h>
27 #include <gst/base/gstbytereader.h>
28
29 #include "gstceaccoverlay.h"
30 #include <string.h>
31
32
33 #define GST_CAT_DEFAULT gst_cea_cc_overlay_debug
34 GST_DEBUG_CATEGORY (gst_cea_cc_overlay_debug);
35
36
37 #define DEFAULT_PROP_FONT_DESC ""
38 #define DEFAULT_PROP_SILENT FALSE
39 #define DEFAULT_PROP_SERVICE_NUMBER 1
40 #define DEFAULT_PROP_WINDOW_H_POS GST_CEA_CC_OVERLAY_WIN_H_CENTER
41
42 enum
43 {
44 PROP_0,
45 PROP_FONT_DESC,
46 PROP_SILENT,
47 PROP_SERVICE_NUMBER,
48 PROP_WINDOW_H_POS,
49 PROP_LAST
50 };
51
52 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
53 # define CAIRO_ARGB_A 3
54 # define CAIRO_ARGB_R 2
55 # define CAIRO_ARGB_G 1
56 # define CAIRO_ARGB_B 0
57 #else
58 # define CAIRO_ARGB_A 0
59 # define CAIRO_ARGB_R 1
60 # define CAIRO_ARGB_G 2
61 # define CAIRO_ARGB_B 3
62 #endif
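
/* The CAIRO_ARGB_* offsets above differ by endianness because Cairo stores
 * CAIRO_FORMAT_ARGB32 pixels as native-endian 32-bit values (so the in-memory
 * byte order is B,G,R,A on little-endian hosts). */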
63
64 #define CAIRO_UNPREMULTIPLY(a,r,g,b) G_STMT_START { \
65 b = (a > 0) ? MIN ((b * 255 + a / 2) / a, 255) : 0; \
66 g = (a > 0) ? MIN ((g * 255 + a / 2) / a, 255) : 0; \
67 r = (a > 0) ? MIN ((r * 255 + a / 2) / a, 255) : 0; \
68 } G_STMT_END
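
/* Worked example (illustrative): for a = 128 and a premultiplied value
 * b = 64, CAIRO_UNPREMULTIPLY yields (64 * 255 + 64) / 128 = 128, i.e. the
 * channel is restored to roughly 50% intensity; channels stay 0 when a == 0. */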
69
70
71 #define VIDEO_FORMATS GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS
72
73 #define CC_OVERLAY_CAPS GST_VIDEO_CAPS_MAKE (VIDEO_FORMATS)
74
75 #define CC_OVERLAY_ALL_CAPS CC_OVERLAY_CAPS ";" \
76 GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
77
78 static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (CC_OVERLAY_CAPS);
79
80 static GstStaticPadTemplate src_template_factory =
81 GST_STATIC_PAD_TEMPLATE ("src",
82 GST_PAD_SRC,
83 GST_PAD_ALWAYS,
84 GST_STATIC_CAPS (CC_OVERLAY_ALL_CAPS)
85 );
86
87 static GstStaticPadTemplate video_sink_template_factory =
88 GST_STATIC_PAD_TEMPLATE ("video_sink",
89 GST_PAD_SINK,
90 GST_PAD_ALWAYS,
91 GST_STATIC_CAPS (CC_OVERLAY_ALL_CAPS)
92 );
93
94 static GstStaticPadTemplate cc_sink_template_factory =
95 GST_STATIC_PAD_TEMPLATE ("cc_sink",
96 GST_PAD_SINK,
97 GST_PAD_ALWAYS,
98 GST_STATIC_CAPS
99 ("closedcaption/x-cea-708, format={ (string) cdp, (string) cc_data }")
100 );
101
102
103 #define GST_TYPE_CC_OVERLAY_WIN_H_POS (gst_cea_cc_overlay_h_pos_get_type())
104 static GType
105 gst_cea_cc_overlay_h_pos_get_type (void)
106 {
107 static GType cc_overlay_win_h_pos_type = 0;
108 static const GEnumValue cc_overlay_win_h_pos[] = {
109 {GST_CEA_CC_OVERLAY_WIN_H_LEFT, "left", "left"},
110 {GST_CEA_CC_OVERLAY_WIN_H_CENTER, "center", "center"},
111 {GST_CEA_CC_OVERLAY_WIN_H_RIGHT, "right", "right"},
112 {GST_CEA_CC_OVERLAY_WIN_H_AUTO, "auto", "auto"},
113 {0, NULL, NULL},
114 };
115
116 if (!cc_overlay_win_h_pos_type) {
117 cc_overlay_win_h_pos_type =
118 g_enum_register_static ("GstCeaCcOverlayWinHPos", cc_overlay_win_h_pos);
119 }
120 return cc_overlay_win_h_pos_type;
121 }
122
123
124 #define GST_CEA_CC_OVERLAY_GET_LOCK(ov) (&GST_CEA_CC_OVERLAY (ov)->lock)
125 #define GST_CEA_CC_OVERLAY_GET_COND(ov) (&GST_CEA_CC_OVERLAY (ov)->cond)
126 #define GST_CEA_CC_OVERLAY_LOCK(ov) (g_mutex_lock (GST_CEA_CC_OVERLAY_GET_LOCK (ov)))
127 #define GST_CEA_CC_OVERLAY_UNLOCK(ov) (g_mutex_unlock (GST_CEA_CC_OVERLAY_GET_LOCK (ov)))
128 #define GST_CEA_CC_OVERLAY_WAIT(ov) (g_cond_wait (GST_CEA_CC_OVERLAY_GET_COND (ov), GST_CEA_CC_OVERLAY_GET_LOCK (ov)))
129 #define GST_CEA_CC_OVERLAY_SIGNAL(ov) (g_cond_signal (GST_CEA_CC_OVERLAY_GET_COND (ov)))
130 #define GST_CEA_CC_OVERLAY_BROADCAST(ov)(g_cond_broadcast (GST_CEA_CC_OVERLAY_GET_COND (ov)))
131
132 static GstElementClass *parent_class = NULL;
133 static void gst_base_cea_cc_overlay_base_init (gpointer g_class);
134 static void gst_base_cea_cc_overlay_class_init (GstCeaCcOverlayClass * klass);
135 static void gst_base_cea_cc_overlay_init (GstCeaCcOverlay * overlay,
136 GstCeaCcOverlayClass * klass);
137 static GstStateChangeReturn gst_cea_cc_overlay_change_state (GstElement *
138 element, GstStateChange transition);
139 static GstCaps *gst_cea_cc_overlay_get_videosink_caps (GstPad * pad,
140 GstCeaCcOverlay * overlay, GstCaps * filter);
141 static GstCaps *gst_cea_cc_overlay_get_src_caps (GstPad * pad,
142 GstCeaCcOverlay * overlay, GstCaps * filter);
143 static gboolean gst_cea_cc_overlay_setcaps (GstCeaCcOverlay * overlay,
144 GstCaps * caps);
145 static gboolean gst_cea_cc_overlay_src_event (GstPad * pad, GstObject * parent,
146 GstEvent * event);
147 static gboolean gst_cea_cc_overlay_src_query (GstPad * pad, GstObject * parent,
148 GstQuery * query);
149
150 static gboolean gst_cea_cc_overlay_video_event (GstPad * pad,
151 GstObject * parent, GstEvent * event);
152 static gboolean gst_cea_cc_overlay_video_query (GstPad * pad,
153 GstObject * parent, GstQuery * query);
154 static GstFlowReturn gst_cea_cc_overlay_video_chain (GstPad * pad,
155 GstObject * parent, GstBuffer * buffer);
156
157 static gboolean gst_cea_cc_overlay_cc_event (GstPad * pad,
158 GstObject * parent, GstEvent * event);
159 static GstFlowReturn gst_cea_cc_overlay_cc_chain (GstPad * pad,
160 GstObject * parent, GstBuffer * buffer);
161 static GstPadLinkReturn gst_cea_cc_overlay_cc_pad_link (GstPad * pad,
162 GstObject * parent, GstPad * peer);
163 static void gst_cea_cc_overlay_cc_pad_unlink (GstPad * pad, GstObject * parent);
164 static void gst_cea_cc_overlay_pop_text (GstCeaCcOverlay * overlay);
165 static void gst_cea_cc_overlay_finalize (GObject * object);
166 static void gst_cea_cc_overlay_set_property (GObject * object, guint prop_id,
167 const GValue * value, GParamSpec * pspec);
168 static void gst_cea_cc_overlay_get_property (GObject * object, guint prop_id,
169 GValue * value, GParamSpec * pspec);
170
171 static gboolean gst_cea_cc_overlay_can_handle_caps (GstCaps * incaps);
172
173 GType
174 gst_cea_cc_overlay_get_type (void)
175 {
176 static GType type = 0;
177
178 if (g_once_init_enter ((gsize *) & type)) {
179 static const GTypeInfo info = {
180 sizeof (GstCeaCcOverlayClass),
181 (GBaseInitFunc) gst_base_cea_cc_overlay_base_init,
182 NULL,
183 (GClassInitFunc) gst_base_cea_cc_overlay_class_init,
184 NULL,
185 NULL,
186 sizeof (GstCeaCcOverlay),
187 0,
188 (GInstanceInitFunc) gst_base_cea_cc_overlay_init,
189 };
190
191 g_once_init_leave ((gsize *) & type,
192 g_type_register_static (GST_TYPE_ELEMENT, "GstCeaCcOverlay", &info, 0));
193 }
194
195 return type;
196 }
197
198 GST_ELEMENT_REGISTER_DEFINE (cc708overlay, "cc708overlay",
199 GST_RANK_PRIMARY, GST_TYPE_CEA_CC_OVERLAY);
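
/* Usage sketch (illustrative only, not taken from this element's docs):
 * decoded video goes into the always "video_sink" pad and a CEA-708 stream
 * (caps closedcaption/x-cea-708, format=cdp or cc_data) into "cc_sink", e.g.
 *
 *   gst-launch-1.0 cc708overlay name=overlay ! videoconvert ! autovideosink \
 *       <video branch>   ! overlay.video_sink \
 *       <caption branch> ! overlay.cc_sink
 */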
200
201 static void
202 gst_base_cea_cc_overlay_base_init (gpointer g_class)
203 {
204 GstCeaCcOverlayClass *klass = GST_CEA_CC_OVERLAY_CLASS (g_class);
205 PangoFontMap *fontmap;
206
207 /* Only lock for the subclasses here, the base class
208 * doesn't have this mutex yet and it's not necessary
209 * here */
210 /* FIXME : Not needed anymore since pango 1.32.6 ! */
211 if (klass->pango_lock)
212 g_mutex_lock (klass->pango_lock);
213 fontmap = pango_cairo_font_map_get_default ();
214 klass->pango_context =
215 pango_font_map_create_context (PANGO_FONT_MAP (fontmap));
216 if (klass->pango_lock)
217 g_mutex_unlock (klass->pango_lock);
218
219 }
220
221 static void
222 gst_base_cea_cc_overlay_class_init (GstCeaCcOverlayClass * klass)
223 {
224 GObjectClass *gobject_class;
225 GstElementClass *gstelement_class;
226
227 gobject_class = (GObjectClass *) klass;
228 gstelement_class = (GstElementClass *) klass;
229
230 GST_DEBUG_CATEGORY_INIT (gst_cea_cc_overlay_debug, "cc708overlay", 0,
231 "cc708overlay");
232
233 parent_class = g_type_class_peek_parent (klass);
234
235 gobject_class->finalize = gst_cea_cc_overlay_finalize;
236 gobject_class->set_property = gst_cea_cc_overlay_set_property;
237 gobject_class->get_property = gst_cea_cc_overlay_get_property;
238
239 gst_element_class_add_pad_template (gstelement_class,
240 gst_static_pad_template_get (&src_template_factory));
241 gst_element_class_add_pad_template (gstelement_class,
242 gst_static_pad_template_get (&video_sink_template_factory));
243 gst_element_class_add_pad_template (gstelement_class,
244 gst_static_pad_template_get (&cc_sink_template_factory));
245
246 gstelement_class->change_state =
247 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_change_state);
248
249 klass->pango_lock = g_slice_new (GMutex);
250 g_mutex_init (klass->pango_lock);
251
252 g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SERVICE_NUMBER,
253 g_param_spec_int ("service-number", "service-number",
254 "Service number. Service 1 is designated as the Primary Caption Service,"
255 " Service 2 is the Secondary Language Service.",
256 -1, 63, DEFAULT_PROP_SERVICE_NUMBER,
257 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
258
259 g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_WINDOW_H_POS,
260 g_param_spec_enum ("window-h-pos", "window-h-pos",
261 "Window's Horizontal position", GST_TYPE_CC_OVERLAY_WIN_H_POS,
262 DEFAULT_PROP_WINDOW_H_POS,
263 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
264
265 g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_FONT_DESC,
266 g_param_spec_string ("font-desc", "font description",
267 "Pango font description of font to be used for rendering.\n"
268 "See documentation of pango_font_description_from_string for syntax.\n"
269 "this will override closed caption stream specified font style/pen size.",
270 DEFAULT_PROP_FONT_DESC, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
271
272 /**
273 * GstCeaCcOverlay:silent:
274 *
275 * If set, no text is rendered. Useful to switch off text rendering
276 * temporarily without removing the overlay element from the pipeline.
277 */
278 /* FIXME 0.11: rename to "visible" or "text-visible" or "render-text" */
279 g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SILENT,
280 g_param_spec_boolean ("silent", "silent",
281 "Whether to render the text string",
282 DEFAULT_PROP_SILENT,
283 G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
284
285 gst_element_class_set_static_metadata (gstelement_class,
286 "Closed Caption overlay", "Mixer/Video/Overlay/Subtitle",
287 "Decode cea608/cea708 data and overlay on proper position of a video buffer",
288 "Chengjun Wang <cjun.wang@samsung.com>");
289 gst_cea708_decoder_init_debug ();
290
291 gst_type_mark_as_plugin_api (GST_TYPE_CC_OVERLAY_WIN_H_POS, 0);
292
293 }
294
295 static void
296 gst_cea_cc_overlay_finalize (GObject * object)
297 {
298 GstCeaCcOverlay *overlay = GST_CEA_CC_OVERLAY (object);
299
300 if (overlay->current_composition) {
301 gst_video_overlay_composition_unref (overlay->current_composition);
302 overlay->current_composition = NULL;
303 }
304 if (overlay->next_composition) {
305 gst_video_overlay_composition_unref (overlay->next_composition);
306 overlay->next_composition = NULL;
307 }
308
309 gst_cea708dec_free (overlay->decoder);
310 overlay->decoder = NULL;
311
312 g_mutex_clear (&overlay->lock);
313 g_cond_clear (&overlay->cond);
314
315 G_OBJECT_CLASS (parent_class)->finalize (object);
316 }
317
318 static void
319 gst_base_cea_cc_overlay_init (GstCeaCcOverlay * overlay,
320 GstCeaCcOverlayClass * klass)
321 {
322 GstPadTemplate *template;
323 overlay->decoder = gst_cea708dec_create (GST_CEA_CC_OVERLAY_GET_CLASS
324 (overlay)->pango_context);
325
326 /* video sink */
327 template = gst_static_pad_template_get (&video_sink_template_factory);
328 overlay->video_sinkpad = gst_pad_new_from_template (template, "video_sink");
329 gst_object_unref (template);
330 gst_pad_set_event_function (overlay->video_sinkpad,
331 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_video_event));
332 gst_pad_set_chain_function (overlay->video_sinkpad,
333 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_video_chain));
334 gst_pad_set_query_function (overlay->video_sinkpad,
335 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_video_query));
336 GST_PAD_SET_PROXY_ALLOCATION (overlay->video_sinkpad);
337 gst_element_add_pad (GST_ELEMENT (overlay), overlay->video_sinkpad);
338
339 template =
340 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "cc_sink");
341 if (template) {
342 /* text sink */
343 overlay->cc_sinkpad = gst_pad_new_from_template (template, "cc_sink");
344
345 gst_pad_set_event_function (overlay->cc_sinkpad,
346 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_cc_event));
347 gst_pad_set_chain_function (overlay->cc_sinkpad,
348 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_cc_chain));
349 gst_pad_set_link_function (overlay->cc_sinkpad,
350 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_cc_pad_link));
351 gst_pad_set_unlink_function (overlay->cc_sinkpad,
352 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_cc_pad_unlink));
353 gst_element_add_pad (GST_ELEMENT (overlay), overlay->cc_sinkpad);
354 }
355
356 /* (video) source */
357 template = gst_static_pad_template_get (&src_template_factory);
358 overlay->srcpad = gst_pad_new_from_template (template, "src");
359 gst_object_unref (template);
360 gst_pad_set_event_function (overlay->srcpad,
361 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_src_event));
362 gst_pad_set_query_function (overlay->srcpad,
363 GST_DEBUG_FUNCPTR (gst_cea_cc_overlay_src_query));
364 gst_element_add_pad (GST_ELEMENT (overlay), overlay->srcpad);
365
366
367 overlay->silent = DEFAULT_PROP_SILENT;
368 overlay->need_update = TRUE;
369 overlay->current_composition = NULL;
370 overlay->next_composition = NULL;
371 overlay->cc_pad_linked = FALSE;
372 overlay->current_comp_start_time = GST_CLOCK_TIME_NONE;
373 overlay->next_comp_start_time = GST_CLOCK_TIME_NONE;
374 overlay->cea608_index[0] = 0;
375 overlay->cea608_index[1] = 0;
376 overlay->cea708_index = 0;
377 overlay->default_window_h_pos = DEFAULT_PROP_WINDOW_H_POS;
378
379 g_mutex_init (&overlay->lock);
380 g_cond_init (&overlay->cond);
381 gst_segment_init (&overlay->segment, GST_FORMAT_TIME);
382 }
383
384 /* only negotiate/query video overlay composition support for now */
385 static gboolean
386 gst_cea_cc_overlay_negotiate (GstCeaCcOverlay * overlay, GstCaps * caps)
387 {
388 GstQuery *query;
389 gboolean attach = FALSE;
390 gboolean caps_has_meta = TRUE;
391 gboolean ret;
392 GstCapsFeatures *f;
393 GstCaps *original_caps;
394 gboolean original_has_meta = FALSE;
395 gboolean allocation_ret = TRUE;
396
397 GST_DEBUG_OBJECT (overlay, "performing negotiation");
398
399 if (!caps)
400 caps = gst_pad_get_current_caps (overlay->video_sinkpad);
401 else
402 gst_caps_ref (caps);
403
404 if (!caps || gst_caps_is_empty (caps))
405 goto no_format;
406
407 original_caps = caps;
408
409 /* Try to use the overlay meta if possible */
410 f = gst_caps_get_features (caps, 0);
411
412 /* if the caps doesn't have the overlay meta, we query whether downstream
413 * accepts it before trying the version without the meta.
414 * If upstream is already using the meta then we can only use it */
415 if (!f
416 || !gst_caps_features_contains (f,
417 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)) {
418 GstCaps *overlay_caps;
419
420 /* In this case we added the meta, but we can work without it
421 * so preserve the original caps so we can use it as a fallback */
422 overlay_caps = gst_caps_copy (caps);
423
424 f = gst_caps_get_features (overlay_caps, 0);
425 gst_caps_features_add (f,
426 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
427
428 ret = gst_pad_peer_query_accept_caps (overlay->srcpad, overlay_caps);
429 GST_DEBUG_OBJECT (overlay, "Downstream accepts the overlay meta: %d", ret);
430 if (ret) {
431 gst_caps_unref (caps);
432 caps = overlay_caps;
433
434 } else {
435 /* fallback to the original */
436 gst_caps_unref (overlay_caps);
437 caps_has_meta = FALSE;
438 }
439 } else {
440 original_has_meta = TRUE;
441 }
442 GST_DEBUG_OBJECT (overlay, "Using caps %" GST_PTR_FORMAT, caps);
443 ret = gst_pad_set_caps (overlay->srcpad, caps);
444
445 if (ret) {
446 /* find supported meta */
447 query = gst_query_new_allocation (caps, FALSE);
448
449 if (!gst_pad_peer_query (overlay->srcpad, query)) {
450 /* no problem, we use the query defaults */
451 GST_DEBUG_OBJECT (overlay, "ALLOCATION query failed");
452 allocation_ret = FALSE;
453 }
454
455 if (caps_has_meta && gst_query_find_allocation_meta (query,
456 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL))
457 attach = TRUE;
458 gst_query_unref (query);
459 }
460
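  /* TRUE means downstream supports GstVideoOverlayCompositionMeta, so
   * push_frame() will attach the composition to the buffer instead of
   * blending it into the video frame itself */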
461 overlay->attach_compo_to_buffer = attach;
462
463 if (!allocation_ret && overlay->video_flushing) {
464 ret = FALSE;
465 } else if (original_caps && !original_has_meta && !attach) {
466 if (caps_has_meta) {
467 /* Some elements (fakesink) claim to accept the meta on caps but won't
468 put it in the allocation query result, which causes the check below
469 to fail. Prevent this by removing the meta from the caps */
470 gst_caps_unref (caps);
471 caps = gst_caps_ref (original_caps);
472 ret = gst_pad_set_caps (overlay->srcpad, caps);
473 if (ret && !gst_cea_cc_overlay_can_handle_caps (caps))
474 ret = FALSE;
475 }
476 }
477
478 if (!ret) {
479 GST_DEBUG_OBJECT (overlay, "negotiation failed, schedule reconfigure");
480 gst_pad_mark_reconfigure (overlay->srcpad);
481 }
482 gst_caps_unref (caps);
483 GST_DEBUG_OBJECT (overlay, "ret=%d", ret);
484
485 return ret;
486
487 no_format:
488 {
489 if (caps)
490 gst_caps_unref (caps);
491 return FALSE;
492 }
493 }
494
495 static gboolean
496 gst_cea_cc_overlay_can_handle_caps (GstCaps * incaps)
497 {
498 gboolean ret;
499 GstCaps *caps;
500 static GstStaticCaps static_caps = GST_STATIC_CAPS (CC_OVERLAY_CAPS);
501
502 caps = gst_static_caps_get (&static_caps);
503 ret = gst_caps_is_subset (incaps, caps);
504 gst_caps_unref (caps);
505
506 return ret;
507 }
508
509 static gboolean
510 gst_cea_cc_overlay_setcaps (GstCeaCcOverlay * overlay, GstCaps * caps)
511 {
512 GstVideoInfo info;
513 gboolean ret = FALSE;
514
515 if (!gst_video_info_from_caps (&info, caps))
516 goto invalid_caps;
517
518 overlay->info = info;
519 overlay->format = GST_VIDEO_INFO_FORMAT (&info);
520 overlay->width = GST_VIDEO_INFO_WIDTH (&info);
521 overlay->height = GST_VIDEO_INFO_HEIGHT (&info);
522 gst_cea708dec_set_video_width_height (overlay->decoder, overlay->width,
523 overlay->height);
524 ret = gst_cea_cc_overlay_negotiate (overlay, caps);
525
526 GST_CEA_CC_OVERLAY_LOCK (overlay);
527 g_mutex_lock (GST_CEA_CC_OVERLAY_GET_CLASS (overlay)->pango_lock);
528 if (!overlay->attach_compo_to_buffer &&
529 !gst_cea_cc_overlay_can_handle_caps (caps)) {
530 GST_DEBUG_OBJECT (overlay, "unsupported caps %" GST_PTR_FORMAT, caps);
531 ret = FALSE;
532 }
533
534 g_mutex_unlock (GST_CEA_CC_OVERLAY_GET_CLASS (overlay)->pango_lock);
535 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
536
537 return ret;
538
539 /* ERRORS */
540 invalid_caps:
541 {
542 GST_DEBUG_OBJECT (overlay, "could not parse caps");
543 return FALSE;
544 }
545 }
546
547 static void
548 gst_cea_cc_overlay_set_property (GObject * object, guint prop_id,
549 const GValue * value, GParamSpec * pspec)
550 {
551 GstCeaCcOverlay *overlay = GST_CEA_CC_OVERLAY (object);
552 Cea708Dec *decoder = overlay->decoder;
553
554 GST_CEA_CC_OVERLAY_LOCK (overlay);
555 switch (prop_id) {
556 case PROP_SERVICE_NUMBER:
557 {
558 int desired_service = g_value_get_int (value);
559 gst_cea708dec_set_service_number (decoder, desired_service);
560 break;
561 }
562 case PROP_FONT_DESC:
563 {
564 PangoFontDescription *desc = NULL;
565 const gchar *fontdesc_str;
566 fontdesc_str = g_value_get_string (value);
567
568 GST_LOG_OBJECT (overlay, "Got font description '%s'", fontdesc_str);
569 if (fontdesc_str)
570 desc = pango_font_description_from_string (fontdesc_str);
571 /* Only set if NULL or valid description */
572 if (desc || !fontdesc_str) {
573 if (desc) {
574 GST_INFO_OBJECT (overlay, "Setting font description: '%s'",
575 fontdesc_str);
576 pango_font_description_free (desc);
577 } else
578 GST_INFO_OBJECT (overlay, "Resetting default font description");
579 g_free (decoder->default_font_desc);
580 decoder->default_font_desc = g_strdup (fontdesc_str);
581 }
582 break;
583 }
584 case PROP_SILENT:
585 overlay->silent = g_value_get_boolean (value);
586 break;
587 case PROP_WINDOW_H_POS:
588 overlay->default_window_h_pos = g_value_get_enum (value);
589 break;
590 default:
591 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
592 break;
593 }
594
595 overlay->need_update = TRUE;
596 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
597 }
598
599 static void
600 gst_cea_cc_overlay_get_property (GObject * object, guint prop_id,
601 GValue * value, GParamSpec * pspec)
602 {
603 GstCeaCcOverlay *overlay = GST_CEA_CC_OVERLAY (object);
604 Cea708Dec *decoder = overlay->decoder;
605
606 GST_CEA_CC_OVERLAY_LOCK (overlay);
607 switch (prop_id) {
608 case PROP_SERVICE_NUMBER:
609 g_value_set_int (value, decoder->desired_service);
610 break;
611 case PROP_SILENT:
612 g_value_set_boolean (value, overlay->silent);
613 break;
614 case PROP_FONT_DESC:
615 g_value_set_string (value, decoder->default_font_desc);
616 break;
617 case PROP_WINDOW_H_POS:
618 g_value_set_enum (value, overlay->default_window_h_pos);
619 break;
620 default:
621 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
622 break;
623 }
624
625 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
626 }
627
628 static gboolean
629 gst_cea_cc_overlay_src_query (GstPad * pad, GstObject * parent,
630 GstQuery * query)
631 {
632 gboolean ret = FALSE;
633 GstCeaCcOverlay *overlay;
634
635 overlay = GST_CEA_CC_OVERLAY (parent);
636
637 switch (GST_QUERY_TYPE (query)) {
638 case GST_QUERY_CAPS:
639 {
640 GstCaps *filter, *caps;
641
642 gst_query_parse_caps (query, &filter);
643 caps = gst_cea_cc_overlay_get_src_caps (pad, overlay, filter);
644 gst_query_set_caps_result (query, caps);
645 gst_caps_unref (caps);
646 ret = TRUE;
647 break;
648 }
649 default:
650 ret = gst_pad_query_default (pad, parent, query);
651 break;
652 }
653
654 return ret;
655 }
656
657 static gboolean
658 gst_cea_cc_overlay_src_event (GstPad * pad, GstObject * parent,
659 GstEvent * event)
660 {
661 GstCeaCcOverlay *overlay;
662 gboolean ret;
663
664 overlay = GST_CEA_CC_OVERLAY (parent);
665
666 if (overlay->cc_pad_linked) {
667 gst_event_ref (event);
668 ret = gst_pad_push_event (overlay->video_sinkpad, event);
669 gst_pad_push_event (overlay->cc_sinkpad, event);
670 } else {
671 ret = gst_pad_push_event (overlay->video_sinkpad, event);
672 }
673
674 return ret;
675 }
676
677 /**
678 * gst_cea_cc_overlay_add_feature_and_intersect:
679 *
680 * Creates a new #GstCaps containing the (given caps +
681 * given caps feature) + (given caps intersected by the
682 * given filter).
683 *
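 * For example (illustrative): with caps "video/x-raw" and the
 * meta:GstVideoOverlayComposition feature, the result is
 * "video/x-raw(meta:GstVideoOverlayComposition)" appended with
 * "video/x-raw" intersected against the filter.
 *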
684 * Returns: the new #GstCaps
685 */
686 static GstCaps *
687 gst_cea_cc_overlay_add_feature_and_intersect (GstCaps * caps,
688 const gchar * feature, GstCaps * filter)
689 {
690 int i, caps_size;
691 GstCaps *new_caps;
692
693 new_caps = gst_caps_copy (caps);
694
695 caps_size = gst_caps_get_size (new_caps);
696 for (i = 0; i < caps_size; i++) {
697 GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
698
699 if (!gst_caps_features_is_any (features)) {
700 gst_caps_features_add (features, feature);
701 }
702 }
703
704 gst_caps_append (new_caps, gst_caps_intersect_full (caps,
705 filter, GST_CAPS_INTERSECT_FIRST));
706
707 return new_caps;
708 }
709
710 /**
711 * gst_cea_cc_overlay_intersect_by_feature:
712 *
713 * Creates a new #GstCaps based on the following filtering rule.
714 *
715 * For each individual caps contained in given caps, if the
716 * caps uses the given caps feature, keep a version of the caps
717 * with the feature and another one without. Otherwise, intersect
718 * the caps with the given filter.
719 *
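 * For example (illustrative): an entry carrying the
 * meta:GstVideoOverlayComposition feature is kept both with and without
 * that feature, while a plain "video/x-raw" entry is only intersected
 * with the filter.
 *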
720 * Returns: the new #GstCaps
721 */
722 static GstCaps *
723 gst_cea_cc_overlay_intersect_by_feature (GstCaps * caps,
724 const gchar * feature, GstCaps * filter)
725 {
726 int i, caps_size;
727 GstCaps *new_caps;
728
729 new_caps = gst_caps_new_empty ();
730
731 caps_size = gst_caps_get_size (caps);
732 for (i = 0; i < caps_size; i++) {
733 GstStructure *caps_structure = gst_caps_get_structure (caps, i);
734 GstCapsFeatures *caps_features =
735 gst_caps_features_copy (gst_caps_get_features (caps, i));
736 GstCaps *filtered_caps;
737 GstCaps *simple_caps =
738 gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
739 gst_caps_set_features (simple_caps, 0, caps_features);
740
741 if (gst_caps_features_contains (caps_features, feature)) {
742 gst_caps_append (new_caps, gst_caps_copy (simple_caps));
743
744 gst_caps_features_remove (caps_features, feature);
745 filtered_caps = gst_caps_ref (simple_caps);
746 } else {
747 filtered_caps = gst_caps_intersect_full (simple_caps, filter,
748 GST_CAPS_INTERSECT_FIRST);
749 }
750 gst_caps_unref (simple_caps);
751 gst_caps_append (new_caps, filtered_caps);
752 }
753
754 return new_caps;
755 }
756
757 static GstCaps *
758 gst_cea_cc_overlay_get_videosink_caps (GstPad * pad,
759 GstCeaCcOverlay * overlay, GstCaps * filter)
760 {
761 GstPad *srcpad = overlay->srcpad;
762 GstCaps *peer_caps = NULL, *caps = NULL, *overlay_filter = NULL;
763
764 if (G_UNLIKELY (!overlay))
765 return gst_pad_get_pad_template_caps (pad);
766
767 if (filter) {
768 /* filter caps + composition feature + filter caps
769 * filtered by the software caps. */
770 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
771 overlay_filter = gst_cea_cc_overlay_add_feature_and_intersect (filter,
772 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
773 gst_caps_unref (sw_caps);
774
775 GST_DEBUG_OBJECT (overlay, "overlay filter %" GST_PTR_FORMAT,
776 overlay_filter);
777 }
778
779 peer_caps = gst_pad_peer_query_caps (srcpad, overlay_filter);
780 if (overlay_filter)
781 gst_caps_unref (overlay_filter);
782 if (peer_caps) {
783
784 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
785
786 if (gst_caps_is_any (peer_caps)) {
787 /* if peer returns ANY caps, return filtered src pad template caps */
788 caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
789 } else {
790
791 /* duplicate caps which contains the composition into one version with
792 * the meta and one without. Filter the other caps by the software caps */
793 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
794 caps = gst_cea_cc_overlay_intersect_by_feature (peer_caps,
795 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
796 gst_caps_unref (sw_caps);
797 }
798
799 gst_caps_unref (peer_caps);
800
801 } else {
802 /* no peer, our padtemplate is enough then */
803 caps = gst_pad_get_pad_template_caps (pad);
804 }
805
806 if (filter) {
807 GstCaps *intersection = gst_caps_intersect_full (filter, caps,
808 GST_CAPS_INTERSECT_FIRST);
809 gst_caps_unref (caps);
810 caps = intersection;
811 }
812
813 GST_DEBUG_OBJECT (overlay, "returning %" GST_PTR_FORMAT, caps);
814
815 return caps;
816 }
817
818 static GstCaps *
819 gst_cea_cc_overlay_get_src_caps (GstPad * pad, GstCeaCcOverlay * overlay,
820 GstCaps * filter)
821 {
822 GstPad *sinkpad = overlay->video_sinkpad;
823 GstCaps *peer_caps = NULL, *caps = NULL, *overlay_filter = NULL;
824
825 if (G_UNLIKELY (!overlay))
826 return gst_pad_get_pad_template_caps (pad);
827
828 if (filter) {
829 /* duplicate filter caps which contains the composition into one version
830 * with the meta and one without. Filter the other caps by the software
831 * caps */
832 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
833 overlay_filter =
834 gst_cea_cc_overlay_intersect_by_feature (filter,
835 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
836 gst_caps_unref (sw_caps);
837 }
838
839 peer_caps = gst_pad_peer_query_caps (sinkpad, overlay_filter);
840
841 if (overlay_filter)
842 gst_caps_unref (overlay_filter);
843
844 if (peer_caps) {
845
846 GST_DEBUG_OBJECT (pad, "peer caps %" GST_PTR_FORMAT, peer_caps);
847
848 if (gst_caps_is_any (peer_caps)) {
849
850 /* if peer returns ANY caps, return filtered sink pad template caps */
851 caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));
852
853 } else {
854
855 /* return upstream caps + composition feature + upstream caps
856 * filtered by the software caps. */
857 GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
858 caps = gst_cea_cc_overlay_add_feature_and_intersect (peer_caps,
859 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
860 gst_caps_unref (sw_caps);
861 }
862
863 gst_caps_unref (peer_caps);
864
865 } else {
866 /* no peer, our padtemplate is enough then */
867 caps = gst_pad_get_pad_template_caps (pad);
868 }
869
870 if (filter) {
871 GstCaps *intersection;
872
873 intersection =
874 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
875 gst_caps_unref (caps);
876 caps = intersection;
877 }
878 GST_DEBUG_OBJECT (overlay, "returning %" GST_PTR_FORMAT, caps);
879
880 return caps;
881 }
882
883 /* FIXME: should probably be relative to width/height (adjusted for PAR) */
884 #define BOX_XPAD 6
885 #define BOX_YPAD 6
886
887 static GstFlowReturn
888 gst_cea_cc_overlay_push_frame (GstCeaCcOverlay * overlay,
889 GstBuffer * video_frame)
890 {
891 GstVideoFrame frame;
892
893 if (overlay->current_composition == NULL)
894 goto done;
895 GST_LOG_OBJECT (overlay, "gst_cea_cc_overlay_push_frame");
896
897 if (gst_pad_check_reconfigure (overlay->srcpad))
898 gst_cea_cc_overlay_negotiate (overlay, NULL);
899
900 video_frame = gst_buffer_make_writable (video_frame);
901
902 if (overlay->attach_compo_to_buffer) {
903 GST_DEBUG_OBJECT (overlay, "Attaching text overlay image to video buffer");
904 gst_buffer_add_video_overlay_composition_meta (video_frame,
905 overlay->current_composition);
906 goto done;
907 }
908
909 if (!gst_video_frame_map (&frame, &overlay->info, video_frame,
910 GST_MAP_READWRITE))
911 goto invalid_frame;
912
913 gst_video_overlay_composition_blend (overlay->current_composition, &frame);
914
915 gst_video_frame_unmap (&frame);
916
917 done:
918
919 return gst_pad_push (overlay->srcpad, video_frame);
920
921 /* ERRORS */
922 invalid_frame:
923 {
924 gst_buffer_unref (video_frame);
925 return GST_FLOW_OK;
926 }
927 }
928
929 static GstPadLinkReturn
930 gst_cea_cc_overlay_cc_pad_link (GstPad * pad, GstObject * parent, GstPad * peer)
931 {
932 GstCeaCcOverlay *overlay;
933
934 overlay = GST_CEA_CC_OVERLAY (parent);
935 if (G_UNLIKELY (!overlay))
936 return GST_PAD_LINK_REFUSED;
937
938 GST_DEBUG_OBJECT (overlay, "Closed Caption pad linked");
939
940 overlay->cc_pad_linked = TRUE;
941
942 return GST_PAD_LINK_OK;
943 }
944
945 static void
946 gst_cea_cc_overlay_cc_pad_unlink (GstPad * pad, GstObject * parent)
947 {
948 GstCeaCcOverlay *overlay;
949
950 /* don't use gst_pad_get_parent() here, will deadlock */
951 overlay = GST_CEA_CC_OVERLAY (parent);
952
953 GST_DEBUG_OBJECT (overlay, "Closed Caption pad unlinked");
954
955 overlay->cc_pad_linked = FALSE;
956
957 gst_segment_init (&overlay->cc_segment, GST_FORMAT_UNDEFINED);
958 }
959
960 static gboolean
961 gst_cea_cc_overlay_cc_event (GstPad * pad, GstObject * parent, GstEvent * event)
962 {
963 gboolean ret = FALSE;
964 GstCeaCcOverlay *overlay = NULL;
965
966 overlay = GST_CEA_CC_OVERLAY (parent);
967
968 GST_LOG_OBJECT (overlay, "received event %s", GST_EVENT_TYPE_NAME (event));
969
970 switch (GST_EVENT_TYPE (event)) {
971 case GST_EVENT_CAPS:
972 {
973 GstCaps *caps;
974 GstStructure *st;
975 const gchar *cctype;
976
977 gst_event_parse_caps (event, &caps);
978 st = gst_caps_get_structure (caps, 0);
979 cctype = gst_structure_get_string (st, "format");
980 overlay->is_cdp = !g_strcmp0 (cctype, "cdp");
981 ret = TRUE;
982 break;
983 }
984 case GST_EVENT_SEGMENT:
985 {
986 const GstSegment *segment;
987
988 overlay->cc_eos = FALSE;
989
990 gst_event_parse_segment (event, &segment);
991
992 if (segment->format == GST_FORMAT_TIME) {
993 GST_CEA_CC_OVERLAY_LOCK (overlay);
994 gst_segment_copy_into (segment, &overlay->cc_segment);
995 GST_DEBUG_OBJECT (overlay, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
996 &overlay->cc_segment);
997 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
998 } else {
999 GST_ELEMENT_WARNING (overlay, STREAM, MUX, (NULL),
1000 ("received non-TIME newsegment event on text input"));
1001 }
1002
1003 gst_event_unref (event);
1004 ret = TRUE;
1005
1006 /* wake up the video chain, it might be waiting for a text buffer or
1007 * a text segment update */
1008 GST_CEA_CC_OVERLAY_LOCK (overlay);
1009 GST_CEA_CC_OVERLAY_BROADCAST (overlay);
1010 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1011 break;
1012 }
1013 case GST_EVENT_GAP:
1014 {
1015 GstClockTime start, duration;
1016
1017 gst_event_parse_gap (event, &start, &duration);
1018 if (GST_CLOCK_TIME_IS_VALID (duration))
1019 start += duration;
1020 /* we do not expect another buffer until after gap,
1021 * so that is our position now */
1022 overlay->cc_segment.position = start;
1023
1024 /* wake up the video chain, it might be waiting for a text buffer or
1025 * a text segment update */
1026 GST_CEA_CC_OVERLAY_LOCK (overlay);
1027 GST_CEA_CC_OVERLAY_BROADCAST (overlay);
1028 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1029
1030 gst_event_unref (event);
1031 ret = TRUE;
1032 break;
1033 }
1034 case GST_EVENT_FLUSH_STOP:
1035 GST_CEA_CC_OVERLAY_LOCK (overlay);
1036 GST_INFO_OBJECT (overlay, "text flush stop");
1037 overlay->cc_flushing = FALSE;
1038 overlay->cc_eos = FALSE;
1039 gst_cea_cc_overlay_pop_text (overlay);
1040 gst_segment_init (&overlay->cc_segment, GST_FORMAT_TIME);
1041 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1042 gst_event_unref (event);
1043 ret = TRUE;
1044 break;
1045 case GST_EVENT_FLUSH_START:
1046 GST_CEA_CC_OVERLAY_LOCK (overlay);
1047 GST_INFO_OBJECT (overlay, "text flush start");
1048 overlay->cc_flushing = TRUE;
1049 GST_CEA_CC_OVERLAY_BROADCAST (overlay);
1050 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1051 gst_event_unref (event);
1052 ret = TRUE;
1053 break;
1054 case GST_EVENT_EOS:
1055 GST_CEA_CC_OVERLAY_LOCK (overlay);
1056 overlay->cc_eos = TRUE;
1057 GST_INFO_OBJECT (overlay, "closed caption EOS");
1058 /* wake up the video chain, it might be waiting for a text buffer or
1059 * a text segment update */
1060 GST_CEA_CC_OVERLAY_BROADCAST (overlay);
1061 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1062 gst_event_unref (event);
1063 ret = TRUE;
1064 break;
1065 default:
1066 ret = gst_pad_event_default (pad, parent, event);
1067 break;
1068 }
1069
1070 return ret;
1071 }
1072
1073 static gboolean
1074 gst_cea_cc_overlay_video_event (GstPad * pad, GstObject * parent,
1075 GstEvent * event)
1076 {
1077 gboolean ret = FALSE;
1078 GstCeaCcOverlay *overlay = NULL;
1079
1080 overlay = GST_CEA_CC_OVERLAY (parent);
1081
1082 GST_DEBUG_OBJECT (pad, "received event %s", GST_EVENT_TYPE_NAME (event));
1083
1084 switch (GST_EVENT_TYPE (event)) {
1085 case GST_EVENT_CAPS:
1086 {
1087 GstCaps *caps;
1088
1089 gst_event_parse_caps (event, &caps);
1090 ret = gst_cea_cc_overlay_setcaps (overlay, caps);
1091 gst_event_unref (event);
1092 break;
1093 }
1094 case GST_EVENT_SEGMENT:
1095 {
1096 const GstSegment *segment;
1097
1098 GST_DEBUG_OBJECT (overlay, "received new segment");
1099
1100 gst_event_parse_segment (event, &segment);
1101
1102 if (segment->format == GST_FORMAT_TIME) {
1103 GST_DEBUG_OBJECT (overlay, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
1104 &overlay->segment);
1105
1106 gst_segment_copy_into (segment, &overlay->segment);
1107 } else {
1108 GST_ELEMENT_WARNING (overlay, STREAM, MUX, (NULL),
1109 ("received non-TIME newsegment event on video input"));
1110 }
1111
1112 ret = gst_pad_event_default (pad, parent, event);
1113 break;
1114 }
1115 case GST_EVENT_EOS:
1116 GST_CEA_CC_OVERLAY_LOCK (overlay);
1117 GST_INFO_OBJECT (overlay, "video EOS");
1118 overlay->video_eos = TRUE;
1119 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1120 ret = gst_pad_event_default (pad, parent, event);
1121 break;
1122 case GST_EVENT_FLUSH_START:
1123 GST_CEA_CC_OVERLAY_LOCK (overlay);
1124 GST_INFO_OBJECT (overlay, "video flush start");
1125 overlay->video_flushing = TRUE;
1126 GST_CEA_CC_OVERLAY_BROADCAST (overlay);
1127 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1128 ret = gst_pad_event_default (pad, parent, event);
1129 break;
1130 case GST_EVENT_FLUSH_STOP:
1131 GST_CEA_CC_OVERLAY_LOCK (overlay);
1132 GST_INFO_OBJECT (overlay, "video flush stop");
1133 overlay->video_flushing = FALSE;
1134 overlay->video_eos = FALSE;
1135 gst_segment_init (&overlay->segment, GST_FORMAT_TIME);
1136 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1137 ret = gst_pad_event_default (pad, parent, event);
1138 break;
1139 default:
1140 ret = gst_pad_event_default (pad, parent, event);
1141 break;
1142 }
1143
1144 return ret;
1145 }
1146
1147 static gboolean
1148 gst_cea_cc_overlay_video_query (GstPad * pad, GstObject * parent,
1149 GstQuery * query)
1150 {
1151 gboolean ret = FALSE;
1152 GstCeaCcOverlay *overlay;
1153
1154 overlay = GST_CEA_CC_OVERLAY (parent);
1155
1156 switch (GST_QUERY_TYPE (query)) {
1157 case GST_QUERY_CAPS:
1158 {
1159 GstCaps *filter, *caps;
1160
1161 gst_query_parse_caps (query, &filter);
1162 caps = gst_cea_cc_overlay_get_videosink_caps (pad, overlay, filter);
1163 gst_query_set_caps_result (query, caps);
1164 gst_caps_unref (caps);
1165 ret = TRUE;
1166 break;
1167 }
1168 default:
1169 ret = gst_pad_query_default (pad, parent, query);
1170 break;
1171 }
1172
1173 return ret;
1174 }
1175
1176 /* Called with lock held */
1177 static void
1178 gst_cea_cc_overlay_pop_text (GstCeaCcOverlay * overlay)
1179 {
1180 g_return_if_fail (GST_IS_CEA_CC_OVERLAY (overlay));
1181
1182 if (GST_CLOCK_TIME_IS_VALID (overlay->current_comp_start_time)
1183 && overlay->current_composition) {
1184 GST_DEBUG_OBJECT (overlay, "releasing composition %p",
1185 overlay->current_composition);
1186 gst_video_overlay_composition_unref (overlay->current_composition);
1187 overlay->current_composition = NULL;
1188 overlay->current_comp_start_time = GST_CLOCK_TIME_NONE;
1189 }
1190
1191 /* Let the text task know we used that buffer */
1192 GST_CEA_CC_OVERLAY_BROADCAST (overlay);
1193 }
1194
1195 static void
1196 gst_cea_cc_overlay_image_to_argb (guchar * pixbuf,
1197 cea708Window * window, int stride)
1198 {
1199 int i, j;
1200 guchar *p, *bitp;
1201 int width, height;
1202
1203 width = window->image_width;
1204 height = window->image_height;
1205
1206 for (i = 0; i < height; i++) {
1207 p = pixbuf + i * stride;
1208 bitp = window->text_image + i * width * 4;
1209
1210 for (j = 0; j < width; j++) {
1211 p[0] = bitp[CAIRO_ARGB_A];
1212 p[1] = bitp[CAIRO_ARGB_R];
1213 p[2] = bitp[CAIRO_ARGB_G];
1214 p[3] = bitp[CAIRO_ARGB_B];
1215
1216 /* Cairo uses pre-multiplied ARGB, unpremultiply it */
1217 CAIRO_UNPREMULTIPLY (p[0], p[1], p[2], p[3]);
1218
1219 bitp += 4;
1220 p += 4;
1221 }
1222 }
1223 }
1224
1225 static void
1226 gst_cea_cc_overlay_image_to_ayuv (guchar * pixbuf,
1227 cea708Window * window, int stride)
1228 {
1229 int y; /* text bitmap coordinates */
1230 guchar *p, *bitp;
1231 guchar a, r, g, b;
1232 int width, height;
1233
1234 width = window->image_width;
1235 height = window->image_height;
1236
1237 for (y = 0; y < height; y++) {
1238 int n;
1239 p = pixbuf + y * stride;
1240 bitp = window->text_image + y * width * 4;
1241
1242 for (n = 0; n < width; n++) {
1243 b = bitp[CAIRO_ARGB_B];
1244 g = bitp[CAIRO_ARGB_G];
1245 r = bitp[CAIRO_ARGB_R];
1246 a = bitp[CAIRO_ARGB_A];
1247 bitp += 4;
1248
1249 /* Cairo uses pre-multiplied ARGB, unpremultiply it */
1250 CAIRO_UNPREMULTIPLY (a, r, g, b);
1251
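      /* RGB -> Y'UV conversion in 16-bit fixed point; the constants below are
       * approximately the full-range BT.601 coefficients, e.g. 19595 / 65536
       * ~= 0.299 for the red contribution to luma. */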
1252 *p++ = a;
1253 *p++ = CLAMP ((int) (((19595 * r) >> 16) + ((38470 * g) >> 16) +
1254 ((7471 * b) >> 16)), 0, 255);
1255 *p++ = CLAMP ((int) (-((11059 * r) >> 16) - ((21709 * g) >> 16) +
1256 ((32768 * b) >> 16) + 128), 0, 255);
1257 *p++ = CLAMP ((int) (((32768 * r) >> 16) - ((27439 * g) >> 16) -
1258 ((5329 * b) >> 16) + 128), 0, 255);
1259 }
1260 }
1261 }
1262
1263 static void
1264 gst_cea_cc_overlay_create_and_push_buffer (GstCeaCcOverlay * overlay)
1265 {
1266 Cea708Dec *decoder = overlay->decoder;
1267 GstBuffer *outbuf;
1268 GstMapInfo map;
1269 guint8 *window_image;
1270 gint n;
1271 guint window_id;
1272 cea708Window *window;
1273 guint v_anchor = 0;
1274 guint h_anchor = 0;
1275 GstVideoOverlayComposition *comp = NULL;
1276 GstVideoOverlayRectangle *rect = NULL;
1277 GST_CEA_CC_OVERLAY_LOCK (overlay);
1278
1279 for (window_id = 0; window_id < 8; window_id++) {
1280 window = decoder->cc_windows[window_id];
1281
1282 if (!window->updated) {
1283 continue;
1284 }
1285 if (!window->deleted && window->visible && window->text_image != NULL) {
1286 GST_DEBUG_OBJECT (overlay, "Allocating buffer");
1287 outbuf =
1288 gst_buffer_new_and_alloc (window->image_width *
1289 window->image_height * 4);
1290 gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
1291 window_image = map.data;
1292 if (decoder->use_ARGB) {
1293 memset (window_image, 0,
1294 window->image_width * window->image_height * 4);
1295 gst_buffer_add_video_meta (outbuf, GST_VIDEO_FRAME_FLAG_NONE,
1296 GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, window->image_width,
1297 window->image_height);
1298 } else {
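        /* initialize the window to transparent black in AYUV:
         * A = 0, Y = 0, U = V = 128 (neutral chroma) */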
1299 for (n = 0; n < window->image_width * window->image_height; n++) {
1300 window_image[n * 4] = window_image[n * 4 + 1] = 0;
1301 window_image[n * 4 + 2] = window_image[n * 4 + 3] = 128;
1302 }
1303 gst_buffer_add_video_meta (outbuf, GST_VIDEO_FRAME_FLAG_NONE,
1304 GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_YUV, window->image_width,
1305 window->image_height);
1306 }
1307
1308 v_anchor = window->screen_vertical * overlay->height / 100;
1309 switch (overlay->default_window_h_pos) {
1310 case GST_CEA_CC_OVERLAY_WIN_H_LEFT:
1311 window->h_offset = 0;
1312 break;
1313 case GST_CEA_CC_OVERLAY_WIN_H_CENTER:
1314 window->h_offset = (overlay->width - window->image_width) / 2;
1315 break;
1316 case GST_CEA_CC_OVERLAY_WIN_H_RIGHT:
1317 window->h_offset = overlay->width - window->image_width;
1318 break;
1319 case GST_CEA_CC_OVERLAY_WIN_H_AUTO:
1320 default:
1321 switch (window->anchor_point) {
1322 case ANCHOR_PT_TOP_LEFT:
1323 case ANCHOR_PT_MIDDLE_LEFT:
1324 case ANCHOR_PT_BOTTOM_LEFT:
1325 window->h_offset = h_anchor;
1326 break;
1327
1328 case ANCHOR_PT_TOP_CENTER:
1329 case ANCHOR_PT_CENTER:
1330 case ANCHOR_PT_BOTTOM_CENTER:
1331 window->h_offset = h_anchor - window->image_width / 2;
1332 break;
1333
1334 case ANCHOR_PT_TOP_RIGHT:
1335 case ANCHOR_PT_MIDDLE_RIGHT:
1336 case ANCHOR_PT_BOTTOM_RIGHT:
1337 window->h_offset = h_anchor - window->image_width;
1338 break;
1339 default:
1340 break;
1341 }
1342 break;
1343 }
1344
1345 switch (window->anchor_point) {
1346 case ANCHOR_PT_TOP_LEFT:
1347 case ANCHOR_PT_TOP_CENTER:
1348 case ANCHOR_PT_TOP_RIGHT:
1349 window->v_offset = v_anchor;
1350 break;
1351
1352 case ANCHOR_PT_MIDDLE_LEFT:
1353 case ANCHOR_PT_CENTER:
1354 case ANCHOR_PT_MIDDLE_RIGHT:
1355 window->v_offset = v_anchor - window->image_height / 2;
1356 break;
1357
1358 case ANCHOR_PT_BOTTOM_LEFT:
1359 case ANCHOR_PT_BOTTOM_CENTER:
1360 case ANCHOR_PT_BOTTOM_RIGHT:
1361 window->v_offset = v_anchor - window->image_height;
1362 break;
1363 default:
1364 break;
1365 }
1366 if (decoder->use_ARGB) {
1367 gst_cea_cc_overlay_image_to_argb (window_image, window,
1368 window->image_width * 4);
1369 } else {
1370 gst_cea_cc_overlay_image_to_ayuv (window_image, window,
1371 window->image_width * 4);
1372 }
1373 gst_buffer_unmap (outbuf, &map);
1374 GST_INFO_OBJECT (overlay,
1375 "window->anchor_point=%d,v_anchor=%d,h_anchor=%d,window->image_height=%d,window->image_width=%d, window->v_offset=%d, window->h_offset=%d,window->justify_mode=%d",
1376 window->anchor_point, v_anchor, h_anchor, window->image_height,
1377 window->image_width, window->v_offset, window->h_offset,
1378 window->justify_mode);
1379 rect =
1380 gst_video_overlay_rectangle_new_raw (outbuf, window->h_offset,
1381 window->v_offset, window->image_width, window->image_height, 0);
1382 if (comp == NULL) {
1383 comp = gst_video_overlay_composition_new (rect);
1384 } else {
1385 gst_video_overlay_composition_add_rectangle (comp, rect);
1386 }
1387 gst_video_overlay_rectangle_unref (rect);
1388 gst_buffer_unref (outbuf);
1389 }
1390 }
1391
1392 /* Wait for the previous buffer to go away */
1393 if (GST_CLOCK_TIME_IS_VALID (overlay->current_comp_start_time)) {
1394 overlay->next_composition = comp;
1395 overlay->next_comp_start_time = decoder->current_time;
1396 GST_DEBUG_OBJECT (overlay,
1397 "wait for render next %p, current is %p BUFFER: next ts=%"
1398 GST_TIME_FORMAT ",current ts=%" GST_TIME_FORMAT,
1399 overlay->next_composition, overlay->current_composition,
1400 GST_TIME_ARGS (overlay->next_comp_start_time),
1401 GST_TIME_ARGS (overlay->current_comp_start_time));
1402
1403 GST_DEBUG_OBJECT (overlay, "has a closed caption buffer queued, waiting");
1404 GST_CEA_CC_OVERLAY_WAIT (overlay);
1405 GST_DEBUG_OBJECT (overlay, "resuming");
1406 if (overlay->cc_flushing) {
1407 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1408 return;
1409 }
1410 }
1411
1412 overlay->next_composition = NULL;
1413 overlay->next_comp_start_time = GST_CLOCK_TIME_NONE;
1414 overlay->current_composition = comp;
1415 overlay->current_comp_start_time = decoder->current_time;
1416 GST_DEBUG_OBJECT (overlay, "T: %" GST_TIME_FORMAT,
1417 GST_TIME_ARGS (overlay->current_comp_start_time));
1418 overlay->need_update = FALSE;
1419
1420 /* in case the video chain is waiting for a text buffer, wake it up */
1421 GST_CEA_CC_OVERLAY_BROADCAST (overlay);
1422 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1423 }
1424
1425 static void
1426 gst_cea_cc_overlay_process_packet (GstCeaCcOverlay * overlay, guint8 cc_type)
1427 {
1428 gint16 *index = NULL;
1429 guint8 *buffer = NULL;
1430 guint8 *dtvcc_buffer = NULL;
1431 gboolean need_render = FALSE;
1432
1433 switch (cc_type) {
1434 case CCTYPE_608_CC1:
1435 case CCTYPE_608_CC2:
1436 index = &overlay->cea608_index[cc_type];
1437 buffer = overlay->cea608_buffer[cc_type];
1438 break;
1439
1440 case CCTYPE_708_ADD:
1441 case CCTYPE_708_START:
1442 index = &overlay->cea708_index;
1443 buffer = overlay->cea708_buffer;
1444 break;
1445 default:
1446 GST_ERROR_OBJECT (overlay,
1447 "attempted to process packet for unknown cc_type %d", cc_type);
1448 return;
1449 }
1450
1451 if (*index > 0) {
1452 /* TODO: add a CEA-608 decoder in the future; currently only CEA-708 data is handled */
1453 if (cc_type == CCTYPE_708_ADD || cc_type == CCTYPE_708_START) {
1454 GST_LOG_OBJECT (overlay,
1455 "called - buf[%" G_GINT16_FORMAT "] = %02X:%02X:%02X:%02X", *index,
1456 buffer[0], buffer[1], buffer[2], buffer[3]);
1457 dtvcc_buffer = g_malloc0 (*index + 1);
1458 memcpy (dtvcc_buffer, buffer, *index);
1459 need_render =
1460 gst_cea708dec_process_dtvcc_packet (overlay->decoder, dtvcc_buffer,
1461 *index);
1462 g_free (dtvcc_buffer);
1463 if (need_render)
1464 gst_cea_cc_overlay_create_and_push_buffer (overlay);
1465 }
1466 }
1467 *index = 0;
1468 }
1469
1470
1471 /**
1472 * gst_cea_cc_overlay_user_data_decode:
1473 * @overlay: The #GstCeaCcOverlay
1474 * @user_data: The #GstMpegVideoCCData to decode
1475 *
1476 * Decode closed caption data and render it when necessary.
1477 * In the user_data data field of a GstMpegVideoCCData struct, every 3 bytes form one cc_data_pkt.
1478 *
1479 * A cc_data_pkt is 3 bytes as follows:
1480 * -------------------------------------------
1481 * 5 bits (b7-b3) marker_bits (should be all 1's)
1482 * 1 bit (b2) cc_valid
1483 * 2 bits (b1-b0) cc_type (bslbf)
1484 * 8 bits cc_data_1 (bslbf)
1485 * 8 bits cc_data_2 (bslbf)
1486 *
1487 * If cc_valid != 1, then ignore this packet
1488 *
1489 * cc_type has these values:
1490 * 0 NTSC_CC_FIELD_1 - CEA-608
1491 * 1 NTSC_CC_FIELD_2 - CEA-608
1492 * 2 DTVCC_PACKET_DATA - CEA-708
1493 * 3 DTVCC_PACKET_START - CEA-708
1494 *
1495 * DTVCC packet (aka. caption channel packet)
1496 * This is formed by accumulating cc_data_1/cc_data_2 from each cc_data_pkt
1497 * starting with a packet where cc_type = 3, and ending with a packet
1498 * where again cc_type = 3 (start of next buffer), or cc_valid=0 && cc_type=2
1499 * DTVCC packet's structure is:
1500 * --------------------------------------------------------------------------
1501 * 2 bits (b6-b7) sequence_number
1502 * 6 bits (b0-b5) packet_size
1503 * ((packet_size*2-1)&0xFF) * 8 bits packet_data (Service Block)
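 *
 * Example (illustrative): a 3-byte cc_data_pkt of FF 12 34 has marker_bits
 * all 1, cc_valid = 1 and cc_type = 3 (DTVCC_PACKET_START), so cc_data_1 =
 * 0x12 and cc_data_2 = 0x34 begin a new DTVCC packet.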
1504 */
1505 static void
1506 gst_cea_cc_overlay_user_data_decode (GstCeaCcOverlay * overlay,
1507 const guint8 * ccdata, gsize ccsize)
1508 {
1509 guint8 temp;
1510 guint8 cc_count;
1511 guint i;
1512 guint8 cc_type;
1513 guint8 cc_valid;
1514 guint8 cc_data[2];
1515
1516 cc_count = ccsize / 3;
1517
1518 for (i = 0; i < cc_count; i++) {
1519 temp = *ccdata++;
1520 cc_data[0] = *ccdata++;
1521 cc_data[1] = *ccdata++;
1522 cc_valid = (temp & CCTYPE_VALID_MASK) ? TRUE : FALSE;
1523 cc_type = (temp & CCTYPE_TYPE_MASK);
1524
1525 GST_LOG_OBJECT (overlay, "cc_data_pkt(%d): cc_valid=%d cc_type=%d "
1526 "cc_data[0]=0x%02X cc_data[1]=0x%02X",
1527 i, cc_valid, cc_type, cc_data[0], cc_data[1]);
1528
1529 /* accumulate dvtcc packet */
1530 switch (cc_type) {
1531 case CCTYPE_608_CC1:
1532 case CCTYPE_608_CC2:
1533 if (cc_valid) {
1534 if (overlay->cea608_index[cc_type] <= DTVCC_LENGTH - 2) {
1535 size_t j;
1536 for (j = 0; j < 2; ++j) {
1537 if ((cc_data[j] < ' ') || (cc_data[j] > '~')) {
1538 gst_cea_cc_overlay_process_packet (overlay, cc_type);
1539 }
1540 overlay->cea608_buffer[cc_type][overlay->
1541 cea608_index[cc_type]++] = cc_data[j];
1542 }
1543 } else {
1544 GST_ERROR_OBJECT (overlay, "cea608_buffer[%d] overflow!", cc_type);
1545 }
1546 }
1547 break;
1548
1549 case CCTYPE_708_ADD:
1550 case CCTYPE_708_START:
1551 if (cc_valid) {
1552 if (cc_type == CCTYPE_708_START) {
1553 /* The previous packet is complete */
1554 gst_cea_cc_overlay_process_packet (overlay, cc_type);
1555 }
1556 /* Add on to the current DTVCC packet */
1557 if (overlay->cea708_index <= DTVCC_LENGTH - 2) {
1558 overlay->cea708_buffer[overlay->cea708_index++] = cc_data[0];
1559 overlay->cea708_buffer[overlay->cea708_index++] = cc_data[1];
1560 } else {
1561 GST_ERROR_OBJECT (overlay, "cea708_buffer overflow!");
1562 }
1563 } else if (cc_type == CCTYPE_708_ADD) {
1564 /* This packet should be ignored, but if there is a current */
1565 /* DTVCC packet then this is the end. */
1566 gst_cea_cc_overlay_process_packet (overlay, cc_type);
1567 }
1568 break;
1569 }
1570 }
1571 }
1572
1573 /* FIXME : Move to GstVideo ANC/CC helper library */
1574 static gboolean
1575 extract_ccdata_from_cdp (const guint8 * indata, gsize insize,
1576 const guint8 ** ccdata, gsize * ccsize)
1577 {
1578 GstByteReader br;
1579 guint8 cdp_length;
1580 guint8 flags;
1581 #ifndef GST_DISABLE_GST_DEBUG
1582 guint8 framerate_code;
1583 guint16 seqhdr;
1584 #endif
1585
1586 GST_MEMDUMP ("CDP", indata, insize);
1587
1588 gst_byte_reader_init (&br, indata, insize);
1589
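  /* Abbreviated CDP layout (section ids per SMPTE 334-2; only partly parsed
   * here): cdp_identifier 0x9669, cdp_length, framerate/reserved, flags,
   * sequence counter, then the sections announced in 'flags' (time code 0x71,
   * cc data 0x72, service info 0x73) and a footer section (0x74) + checksum.
   */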
1590 /* The smallest valid CDP we are interested in is 7 (header) + 2 (cc
1591 * section) + 4 (footer) bytes long */
1592 if (gst_byte_reader_get_remaining (&br) < 13)
1593 return FALSE;
1594
1595 /* Check header */
1596 if (gst_byte_reader_get_uint16_be_unchecked (&br) != 0x9669) {
1597 GST_WARNING ("Invalid CDP header");
1598 return FALSE;
1599 }
1600 cdp_length = gst_byte_reader_get_uint8_unchecked (&br);
1601 if (cdp_length > insize) {
1602 GST_WARNING ("CDP too small (need %d bytes, have %" G_GSIZE_FORMAT ")",
1603 cdp_length, insize);
1604 return FALSE;
1605 }
1606 #ifndef GST_DISABLE_GST_DEBUG
1607 framerate_code = gst_byte_reader_get_uint8_unchecked (&br) >> 4;
1608 #else
1609 gst_byte_reader_skip (&br, 1);
1610 #endif
1611 flags = gst_byte_reader_get_uint8_unchecked (&br);
1612 #ifndef GST_DISABLE_GST_DEBUG
1613 seqhdr = gst_byte_reader_get_uint16_be_unchecked (&br);
1614 #else
1615 gst_byte_reader_skip (&br, 2);
1616 #endif
1617
1618 GST_DEBUG
1619 ("framerate_code : 0x%02x , flags : 0x%02x , sequencer_counter : %u",
1620 framerate_code, flags, seqhdr);
1621
1622 /* Skip timecode if present */
1623 if (flags & 0x80) {
1624 GST_LOG ("Skipping timecode section");
1625 gst_byte_reader_skip (&br, 5);
1626 }
1627
1628 /* cc data */
1629 if (flags & 0x40) {
1630 guint8 ccid, cc_count;
1631 if (!gst_byte_reader_get_uint8 (&br, &ccid) ||
1632 !gst_byte_reader_get_uint8 (&br, &cc_count))
1633 return FALSE;
1634 if (ccid != 0x72) {
1635 GST_WARNING ("Invalid ccdata_id (expected 0x72, got 0x%02x)", ccid);
1636 return FALSE;
1637 }
1638 cc_count &= 0x1f;
1639 if (!gst_byte_reader_get_data (&br, cc_count * 3, ccdata)) {
1640 GST_WARNING ("Not enough ccdata");
1641 *ccdata = NULL;
1642 *ccsize = 0;
1643 return FALSE;
1644 }
1645 *ccsize = cc_count * 3;
1646 }
1647
1648 /* FIXME : Parse/validate the rest of the CDP ! */
1649
1650 return TRUE;
1651 }
1652
1653 /* We receive text buffers here. If they are out of segment we just ignore them.
1654 If the buffer is in our segment we keep it internally except if another one
1655 is already waiting here, in which case we wait until it gets kicked out */
1656 static GstFlowReturn
1657 gst_cea_cc_overlay_cc_chain (GstPad * pad, GstObject * parent,
1658 GstBuffer * buffer)
1659 {
1660 GstFlowReturn ret = GST_FLOW_OK;
1661 GstCeaCcOverlay *overlay = (GstCeaCcOverlay *) parent;
1662 gboolean in_seg = FALSE;
1663 guint64 clip_start = 0, clip_stop = 0;
1664
1665 GST_CEA_CC_OVERLAY_LOCK (overlay);
1666
1667 if (overlay->cc_flushing) {
1668 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1669 ret = GST_FLOW_FLUSHING;
1670 GST_LOG_OBJECT (overlay, "closed caption flushing");
1671 goto beach;
1672 }
1673
1674 if (overlay->cc_eos) {
1675 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1676 ret = GST_FLOW_EOS;
1677 GST_LOG_OBJECT (overlay, "closed caption EOS");
1678 goto beach;
1679 }
1680
1681 GST_LOG_OBJECT (overlay, "%" GST_SEGMENT_FORMAT " BUFFER: ts=%"
1682 GST_TIME_FORMAT ", end=%" GST_TIME_FORMAT, &overlay->segment,
1683 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
1684 GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer) +
1685 GST_BUFFER_DURATION (buffer)));
1686
1687 if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
1688 GstClockTime stop;
1689
1690 if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
1691 stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
1692 else
1693 stop = GST_CLOCK_TIME_NONE;
1694
1695 in_seg = gst_segment_clip (&overlay->cc_segment, GST_FORMAT_TIME,
1696 GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
1697 GST_LOG_OBJECT (overlay, "stop:%" GST_TIME_FORMAT ", in_seg: %d",
1698 GST_TIME_ARGS (stop), in_seg);
1699 } else {
1700 in_seg = TRUE;
1701 }
1702
1703
1704 if (in_seg) {
1705 GstMapInfo buf_map = { 0 };
1706 const guint8 *ccdata = NULL;
1707 gsize ccsize = 0;
1708
1709 overlay->cc_segment.position = clip_start;
1710 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1711
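/* CDP buffers carry the cc triplets inside a wrapper that has to be
 * unpacked first; raw cc_data buffers are fed to the CEA-708 decoder
 * as-is. */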
1712 gst_buffer_map (buffer, &buf_map, GST_MAP_READ);
1713 if (overlay->is_cdp) {
1714 extract_ccdata_from_cdp (buf_map.data, buf_map.size, &ccdata, &ccsize);
1715 } else {
1716 ccdata = buf_map.data;
1717 ccsize = buf_map.size;
1718 }
1719 if (ccsize) {
1720 gst_cea_cc_overlay_user_data_decode (overlay, ccdata, ccsize);
1721 overlay->decoder->current_time = GST_BUFFER_PTS (buffer);
1722 }
1723 gst_buffer_unmap (buffer, &buf_map);
1724 } else {
1725 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1726 }
1727
1728 beach:
1729 gst_buffer_unref (buffer);
1730 return ret;
1731 }
1732
1733 static GstFlowReturn
1734 gst_cea_cc_overlay_video_chain (GstPad * pad, GstObject * parent,
1735 GstBuffer * buffer)
1736 {
1737 GstCeaCcOverlay *overlay;
1738 GstFlowReturn ret = GST_FLOW_OK;
1739 gboolean in_seg = FALSE;
1740 guint64 start, stop, clip_start = 0, clip_stop = 0;
1741
1742 overlay = GST_CEA_CC_OVERLAY (parent);
1743
1744 if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
1745 goto missing_timestamp;
1746
1747 /* ignore buffers that are outside of the current segment */
1748 start = GST_BUFFER_TIMESTAMP (buffer);
1749
1750 if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
1751 stop = GST_CLOCK_TIME_NONE;
1752 } else {
1753 stop = start + GST_BUFFER_DURATION (buffer);
1754 }
1755
1756 GST_LOG_OBJECT (overlay, "%" GST_SEGMENT_FORMAT " BUFFER: ts=%"
1757 GST_TIME_FORMAT ", end=%" GST_TIME_FORMAT, &overlay->segment,
1758 GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
1759
1760 /* segment_clip() will adjust start unconditionally to segment_start if
1761 * no stop time is provided, so handle this ourselves */
1762 if (stop == GST_CLOCK_TIME_NONE && start < overlay->segment.start)
1763 goto out_of_segment;
1764
1765 in_seg = gst_segment_clip (&overlay->segment, GST_FORMAT_TIME, start, stop,
1766 &clip_start, &clip_stop);
1767
1768 if (!in_seg)
1769 goto out_of_segment;
1770
1771 /* if the buffer is only partially in the segment, fix up stamps */
1772 if (clip_start != start || (stop != -1 && clip_stop != stop)) {
1773 GST_DEBUG_OBJECT (overlay, "clipping buffer timestamp/duration to segment");
1774 buffer = gst_buffer_make_writable (buffer);
1775 GST_BUFFER_TIMESTAMP (buffer) = clip_start;
1776 if (stop != -1)
1777 GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
1778 }
1779
1780 /* now, after we've done the clipping, fix up end time if there's no
1781 * duration (we only use those estimated values internally though, we
1782 * don't want to set bogus values on the buffer itself) */
1783 if (stop == -1) {
1784 if (overlay->info.fps_n && overlay->info.fps_d) {
1785 GST_DEBUG_OBJECT (overlay, "estimating duration based on framerate");
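/* one frame at the negotiated framerate: GST_SECOND * fps_d / fps_n */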
1786 stop = start + gst_util_uint64_scale_int (GST_SECOND,
1787 overlay->info.fps_d, overlay->info.fps_n);
1788 } else {
1789 GST_LOG_OBJECT (overlay, "no duration, assuming minimal duration");
1790 stop = start + 1; /* we need to assume some interval */
1791 }
1792 }
1793
1794 gst_object_sync_values (GST_OBJECT (overlay), GST_BUFFER_TIMESTAMP (buffer));
1795
1796 wait_for_text_buf:
1797
1798 GST_CEA_CC_OVERLAY_LOCK (overlay);
1799
1800 if (overlay->video_flushing)
1801 goto flushing;
1802
1803 if (overlay->video_eos)
1804 goto have_eos;
1805
1806 if (overlay->silent) {
1807 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1808 ret = gst_pad_push (overlay->srcpad, buffer);
1809
1810 /* Update position */
1811 overlay->segment.position = clip_start;
1812
1813 return ret;
1814 }
1815
1816 /* Closed Caption pad not linked, rendering video only */
1817 if (!overlay->cc_pad_linked) {
1818 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1819 ret = gst_pad_push (overlay->srcpad, buffer);
1820 } else {
1821 /* Closed Caption pad linked, check if we have a text buffer queued */
1822 if (GST_CLOCK_TIME_IS_VALID (overlay->current_comp_start_time)) {
1823 gboolean pop_text = FALSE, valid_text_time = TRUE;
1824
1825 GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
1826 GstClockTime next_buffer_text_running_time = GST_CLOCK_TIME_NONE;
1827 #ifndef GST_DISABLE_GST_DEBUG
1828 GstClockTime vid_running_time;
1829 #endif
1830 GstClockTime vid_running_time_end;
1831
1832 #ifndef GST_DISABLE_GST_DEBUG
1833 vid_running_time =
1834 gst_segment_to_running_time (&overlay->segment, GST_FORMAT_TIME,
1835 start);
1836 #endif
1837 vid_running_time_end =
1838 gst_segment_to_running_time (&overlay->segment, GST_FORMAT_TIME,
1839 stop);
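/* If the next queued composition starts before the end of this video
 * frame, pop the current one, wait on the overlay condition until new
 * caption state is signalled, then re-evaluate from wait_for_text_buf. */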
1840 if (GST_CLOCK_TIME_IS_VALID (overlay->next_comp_start_time)) {
1841 next_buffer_text_running_time =
1842 gst_segment_to_running_time (&overlay->cc_segment, GST_FORMAT_TIME,
1843 overlay->next_comp_start_time);
1844
1845 if (next_buffer_text_running_time < vid_running_time_end) {
1846 /* text buffer should be force updated, popping */
1847 GST_DEBUG_OBJECT (overlay,
1848 "T: next_buffer_text_running_time: %" GST_TIME_FORMAT
1849 " - overlay->next_comp_start_time: %" GST_TIME_FORMAT,
1850 GST_TIME_ARGS (next_buffer_text_running_time),
1851 GST_TIME_ARGS (overlay->next_comp_start_time));
1852 GST_DEBUG_OBJECT (overlay,
1853 "V: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1854 GST_TIME_ARGS (vid_running_time),
1855 GST_TIME_ARGS (vid_running_time_end));
1856 GST_LOG_OBJECT (overlay,
1857 "text buffer should be force updated, popping");
1858 pop_text = FALSE;
1859 gst_cea_cc_overlay_pop_text (overlay);
1860 GST_CEA_CC_OVERLAY_WAIT (overlay);
1861 GST_DEBUG_OBJECT (overlay, "resuming");
1862 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1863 goto wait_for_text_buf;
1864 }
1865
1866 }
1867
1868 /* if the text buffer isn't stamped right, pop it off the
1869 * queue and display it for the current video frame only */
1870 if (!GST_CLOCK_TIME_IS_VALID (overlay->current_comp_start_time)) {
1871 GST_WARNING_OBJECT (overlay, "Got text buffer with invalid timestamp");
1872 pop_text = TRUE;
1873 valid_text_time = FALSE;
1874 }
1875
1876 /* If timestamp and duration are valid */
1877 if (valid_text_time) {
1878 text_running_time =
1879 gst_segment_to_running_time (&overlay->cc_segment,
1880 GST_FORMAT_TIME, overlay->current_comp_start_time);
1881 }
1882
1883 GST_DEBUG_OBJECT (overlay, "T: %" GST_TIME_FORMAT,
1884 GST_TIME_ARGS (text_running_time));
1885 GST_DEBUG_OBJECT (overlay, "V: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
1886 GST_TIME_ARGS (vid_running_time),
1887 GST_TIME_ARGS (vid_running_time_end));
1888
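/* Composition not due yet: push the plain video frame. Otherwise hand the
 * frame to push_frame() so the current composition is blended on top. */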
1889 if (valid_text_time && vid_running_time_end <= text_running_time) {
1890 GST_LOG_OBJECT (overlay, "text in future, pushing video buf");
1891 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1892 /* Push the video frame */
1893 ret = gst_pad_push (overlay->srcpad, buffer);
1894 } else {
1895 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1896 ret = gst_cea_cc_overlay_push_frame (overlay, buffer);
1897 }
1898 if (pop_text) {
1899 GST_CEA_CC_OVERLAY_LOCK (overlay);
1900 gst_cea_cc_overlay_pop_text (overlay);
1901 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1902 }
1903 } else {
1904 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1905 GST_LOG_OBJECT (overlay, "no need to wait for a text buffer");
1906 ret = gst_pad_push (overlay->srcpad, buffer);
1907 }
1908 }
1909
1910 /* Update position */
1911 overlay->segment.position = clip_start;
1912 GST_DEBUG_OBJECT (overlay, "ret=%d", ret);
1913
1914 return ret;
1915
1916 missing_timestamp:
1917 {
1918 GST_WARNING_OBJECT (overlay, "buffer without timestamp, discarding");
1919 gst_buffer_unref (buffer);
1920 return GST_FLOW_OK;
1921 }
1922
1923 flushing:
1924 {
1925 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1926 GST_DEBUG_OBJECT (overlay, "flushing, discarding buffer");
1927 gst_buffer_unref (buffer);
1928 return GST_FLOW_FLUSHING;
1929 }
1930 have_eos:
1931 {
1932 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1933 GST_DEBUG_OBJECT (overlay, "eos, discarding buffer");
1934 gst_buffer_unref (buffer);
1935 return GST_FLOW_EOS;
1936 }
1937 out_of_segment:
1938 {
1939 GST_DEBUG_OBJECT (overlay, "buffer out of segment, discarding");
1940 gst_buffer_unref (buffer);
1941 return GST_FLOW_OK;
1942 }
1943 }
1944
1945 static GstStateChangeReturn
1946 gst_cea_cc_overlay_change_state (GstElement * element,
1947 GstStateChange transition)
1948 {
1949 GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
1950 GstCeaCcOverlay *overlay = GST_CEA_CC_OVERLAY (element);
1951
1952 switch (transition) {
1953 case GST_STATE_CHANGE_PAUSED_TO_READY:
1954 GST_CEA_CC_OVERLAY_LOCK (overlay);
1955 overlay->cc_flushing = TRUE;
1956 overlay->video_flushing = TRUE;
1957 /* pop_text will broadcast on the GCond and thus also make the video
1958 * chain exit if it's waiting for a text buffer */
1959 gst_cea_cc_overlay_pop_text (overlay);
1960 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1961 break;
1962 default:
1963 break;
1964 }
1965
1966 ret = parent_class->change_state (element, transition);
1967 if (ret == GST_STATE_CHANGE_FAILURE)
1968 return ret;
1969
1970 switch (transition) {
1971 case GST_STATE_CHANGE_READY_TO_PAUSED:
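/* going to PAUSED: clear flushing/EOS state and reset both segments
 * before streaming (re)starts */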
1972 GST_CEA_CC_OVERLAY_LOCK (overlay);
1973 overlay->cc_flushing = FALSE;
1974 overlay->video_flushing = FALSE;
1975 overlay->video_eos = FALSE;
1976 overlay->cc_eos = FALSE;
1977 gst_segment_init (&overlay->segment, GST_FORMAT_TIME);
1978 gst_segment_init (&overlay->cc_segment, GST_FORMAT_TIME);
1979 GST_CEA_CC_OVERLAY_UNLOCK (overlay);
1980 break;
1981 default:
1982 break;
1983 }
1984
1985 return ret;
1986 }
1987