• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2008 Benjamin Schmitz <vortex@wolpzone.de>
3  * Copyright (c) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
4  *
5  * This library is free software; you can redistribute it and/or
6  * modify it under the terms of the GNU Library General Public
7  * License as published by the Free Software Foundation; either
8  * version 2 of the License, or (at your option) any later version.
9  *
10  * This library is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13  * Library General Public License for more details.
14  *
15  * You should have received a copy of the GNU Library General Public
16  * License along with this library; if not, write to the
17  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18  * Boston, MA 02110-1301, USA.
19  */
20 
21 /**
22  * SECTION:element-assrender
23  * @title: assrender
24  *
25  * Renders timestamped SSA/ASS subtitles on top of a video stream.
26  *
27  * ## Example launch line
28  * |[
29  * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink  d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r.   d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
30  * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
31  *
32  */
33 
34 #ifdef HAVE_CONFIG_H
35 #  include <config.h>
36 #endif
37 
38 #include <gst/video/gstvideometa.h>
39 
40 #include "gstassrender.h"
41 
42 #include <string.h>
43 
/* Two debug categories: "assrender" for the element itself and
 * "assrender_library" for messages forwarded from libass by
 * _libass_message_cb (both registered via _do_init). */
GST_DEBUG_CATEGORY_STATIC (gst_ass_render_debug);
GST_DEBUG_CATEGORY_STATIC (gst_ass_render_lib_debug);
#define GST_CAT_DEFAULT gst_ass_render_debug
47 
/* Filter signals and props */

/* No signals are defined; LAST_SIGNAL only marks the (zero) count. */
enum
{
  LAST_SIGNAL
};

/* Property IDs; PROP_0 is the mandatory GObject placeholder. */
enum
{
  PROP_0,
  PROP_ENABLE,                  /* "enable": enable rendering of subtitles */
  PROP_EMBEDDEDFONTS,           /* "embeddedfonts": extract and use fonts
                                 * embedded in the stream */
  PROP_WAIT_TEXT                /* "wait-text": whether to wait for subtitles */
};
61 
/* FIXME: video-blend.c doesn't support formats with more than 8 bit per
 * component (which get unpacked into ARGB64 or AYUV64) yet, such as:
 *  v210, v216, UYVP, GRAY16_LE, GRAY16_BE */
#define FORMATS "{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, \
    I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, \
    NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }"

/* Raw video caps limited to the formats the software blending path handles. */
#define ASSRENDER_CAPS GST_VIDEO_CAPS_MAKE(FORMATS)

/* Software caps plus an ANY-featured variant covering all raw formats, used
 * on the video pads so overlay-composition-meta negotiation is possible. */
#define ASSRENDER_ALL_CAPS ASSRENDER_CAPS ";" \
    GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
73 
/* Caps usable by the software blending path only (see FORMATS above). */
static GstStaticCaps sw_template_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
    );

static GstStaticPadTemplate video_sink_factory =
GST_STATIC_PAD_TEMPLATE ("video_sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (ASSRENDER_ALL_CAPS)
    );

/* The subtitle pad accepts ASS and SSA streams. */
static GstStaticPadTemplate text_sink_factory =
    GST_STATIC_PAD_TEMPLATE ("text_sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-ass; application/x-ssa")
    );
95 
/* Convenience wrappers around the element's lock/cond pair, which
 * synchronises the video and subtitle streaming threads (e.g. pop_text
 * broadcasts on the cond after releasing pending subtitle buffers). */
#define GST_ASS_RENDER_GET_LOCK(ass) (&GST_ASS_RENDER (ass)->lock)
#define GST_ASS_RENDER_GET_COND(ass) (&GST_ASS_RENDER (ass)->cond)
#define GST_ASS_RENDER_LOCK(ass)     (g_mutex_lock (GST_ASS_RENDER_GET_LOCK (ass)))
#define GST_ASS_RENDER_UNLOCK(ass)   (g_mutex_unlock (GST_ASS_RENDER_GET_LOCK (ass)))
#define GST_ASS_RENDER_WAIT(ass)     (g_cond_wait (GST_ASS_RENDER_GET_COND (ass), GST_ASS_RENDER_GET_LOCK (ass)))
#define GST_ASS_RENDER_SIGNAL(ass)   (g_cond_signal (GST_ASS_RENDER_GET_COND (ass)))
#define GST_ASS_RENDER_BROADCAST(ass)(g_cond_broadcast (GST_ASS_RENDER_GET_COND (ass)))
103 
/* GObject property vfuncs */
static void gst_ass_render_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_ass_render_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static void gst_ass_render_finalize (GObject * object);

static GstStateChangeReturn gst_ass_render_change_state (GstElement * element,
    GstStateChange transition);

#define gst_ass_render_parent_class parent_class
G_DEFINE_TYPE (GstAssRender, gst_ass_render, GST_TYPE_ELEMENT);

/* Register both debug categories when the element is registered. */
#define _do_init \
  GST_DEBUG_CATEGORY_INIT (gst_ass_render_debug, "assrender", \
      0, "ASS/SSA subtitle renderer");\
  GST_DEBUG_CATEGORY_INIT (gst_ass_render_lib_debug, "assrender_library",\
      0, "ASS/SSA subtitle renderer library");
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (assrender, "assrender",
    GST_RANK_PRIMARY, GST_TYPE_ASS_RENDER, _do_init);

/* Caps negotiation helpers for the video sink and source pads. */
static GstCaps *gst_ass_render_get_videosink_caps (GstPad * pad,
    GstAssRender * render, GstCaps * filter);
static GstCaps *gst_ass_render_get_src_caps (GstPad * pad,
    GstAssRender * render, GstCaps * filter);

static gboolean gst_ass_render_setcaps_video (GstPad * pad,
    GstAssRender * render, GstCaps * caps);
static gboolean gst_ass_render_setcaps_text (GstPad * pad,
    GstAssRender * render, GstCaps * caps);

/* Chain functions for the two sink pads. */
static GstFlowReturn gst_ass_render_chain_video (GstPad * pad,
    GstObject * parent, GstBuffer * buf);
static GstFlowReturn gst_ass_render_chain_text (GstPad * pad,
    GstObject * parent, GstBuffer * buf);

/* Event handlers for all three pads. */
static gboolean gst_ass_render_event_video (GstPad * pad, GstObject * parent,
    GstEvent * event);
static gboolean gst_ass_render_event_text (GstPad * pad, GstObject * parent,
    GstEvent * event);
static gboolean gst_ass_render_event_src (GstPad * pad, GstObject * parent,
    GstEvent * event);

/* Query handlers (caps queries mainly). */
static gboolean gst_ass_render_query_video (GstPad * pad, GstObject * parent,
    GstQuery * query);
static gboolean gst_ass_render_query_src (GstPad * pad, GstObject * parent,
    GstQuery * query);
150 
151 /* initialize the plugin's class */
152 static void
gst_ass_render_class_init(GstAssRenderClass * klass)153 gst_ass_render_class_init (GstAssRenderClass * klass)
154 {
155   GObjectClass *gobject_class = (GObjectClass *) klass;
156   GstElementClass *gstelement_class = (GstElementClass *) klass;
157 
158   gobject_class->set_property = gst_ass_render_set_property;
159   gobject_class->get_property = gst_ass_render_get_property;
160   gobject_class->finalize = gst_ass_render_finalize;
161 
162   g_object_class_install_property (gobject_class, PROP_ENABLE,
163       g_param_spec_boolean ("enable", "Enable",
164           "Enable rendering of subtitles", TRUE,
165           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
166 
167   g_object_class_install_property (gobject_class, PROP_EMBEDDEDFONTS,
168       g_param_spec_boolean ("embeddedfonts", "Embedded Fonts",
169           "Extract and use fonts embedded in the stream", TRUE,
170           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
171 
172   g_object_class_install_property (gobject_class, PROP_WAIT_TEXT,
173       g_param_spec_boolean ("wait-text", "Wait Text",
174           "Whether to wait for subtitles", TRUE,
175           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
176 
177   gstelement_class->change_state =
178       GST_DEBUG_FUNCPTR (gst_ass_render_change_state);
179 
180   gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
181   gst_element_class_add_static_pad_template (gstelement_class,
182       &video_sink_factory);
183   gst_element_class_add_static_pad_template (gstelement_class,
184       &text_sink_factory);
185 
186   gst_element_class_set_static_metadata (gstelement_class, "ASS/SSA Render",
187       "Mixer/Video/Overlay/Subtitle",
188       "Renders ASS/SSA subtitles with libass",
189       "Benjamin Schmitz <vortex@wolpzone.de>, "
190       "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
191 }
192 
193 static void
_libass_message_cb(gint level,const gchar * fmt,va_list args,gpointer render)194 _libass_message_cb (gint level, const gchar * fmt, va_list args,
195     gpointer render)
196 {
197   gchar *message = g_strdup_vprintf (fmt, args);
198 
199   if (level < 2)
200     GST_CAT_ERROR_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
201   else if (level < 4)
202     GST_CAT_WARNING_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
203   else if (level < 5)
204     GST_CAT_INFO_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
205   else if (level < 6)
206     GST_CAT_DEBUG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
207   else
208     GST_CAT_LOG_OBJECT (gst_ass_render_lib_debug, render, "%s", message);
209 
210   g_free (message);
211 }
212 
/* Instance initialisation: creates the three pads and their chain/event/query
 * functions, sets default property values, initialises the locks, segments
 * and the libass library + renderer instances. */
static void
gst_ass_render_init (GstAssRender * render)
{
  GST_DEBUG_OBJECT (render, "init");

  render->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
  render->video_sinkpad =
      gst_pad_new_from_static_template (&video_sink_factory, "video_sink");
  render->text_sinkpad =
      gst_pad_new_from_static_template (&text_sink_factory, "text_sink");

  gst_pad_set_chain_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_chain_video));
  gst_pad_set_chain_function (render->text_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_chain_text));

  gst_pad_set_event_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_video));
  gst_pad_set_event_function (render->text_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_text));
  gst_pad_set_event_function (render->srcpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_event_src));

  gst_pad_set_query_function (render->srcpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_query_src));
  gst_pad_set_query_function (render->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_ass_render_query_video));

  /* Proxy allocation queries between the video sink pad and the src peer. */
  GST_PAD_SET_PROXY_ALLOCATION (render->video_sinkpad);

  gst_element_add_pad (GST_ELEMENT (render), render->srcpad);
  gst_element_add_pad (GST_ELEMENT (render), render->video_sinkpad);
  gst_element_add_pad (GST_ELEMENT (render), render->text_sinkpad);

  gst_video_info_init (&render->info);

  g_mutex_init (&render->lock);
  g_cond_init (&render->cond);

  /* Defaults; note wait_text starts FALSE (the "wait-text" property). */
  render->renderer_init_ok = FALSE;
  render->track_init_ok = FALSE;
  render->enable = TRUE;
  render->embeddedfonts = TRUE;
  render->wait_text = FALSE;

  gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
  gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);

  /* ass_mutex serialises all access to the libass objects created below. */
  g_mutex_init (&render->ass_mutex);
  render->ass_library = ass_library_init ();
  ass_set_message_cb (render->ass_library, _libass_message_cb, render);
  ass_set_extract_fonts (render->ass_library, 1);

  render->ass_renderer = ass_renderer_init (render->ass_library);
  if (!render->ass_renderer) {
    /* NOTE(review): aborting via g_assert_not_reached() on renderer-init
     * failure is drastic for library code — confirm this is intended. */
    GST_WARNING_OBJECT (render, "cannot create renderer instance");
    g_assert_not_reached ();
  }

  render->ass_track = NULL;

  GST_DEBUG_OBJECT (render, "init complete");
}
276 
/* GObject finalize: releases the locks and all libass resources
 * (track, renderer, library) created in gst_ass_render_init(). */
static void
gst_ass_render_finalize (GObject * object)
{
  GstAssRender *render = GST_ASS_RENDER (object);

  g_mutex_clear (&render->lock);
  g_cond_clear (&render->cond);

  if (render->ass_track) {
    ass_free_track (render->ass_track);
  }

  if (render->ass_renderer) {
    ass_renderer_done (render->ass_renderer);
  }

  /* The library must outlive the renderer, so it is torn down last. */
  if (render->ass_library) {
    ass_library_done (render->ass_library);
  }

  g_mutex_clear (&render->ass_mutex);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
301 
302 static void
gst_ass_render_reset_composition(GstAssRender * render)303 gst_ass_render_reset_composition (GstAssRender * render)
304 {
305   if (render->composition) {
306     gst_video_overlay_composition_unref (render->composition);
307     render->composition = NULL;
308   }
309 }
310 
/* Property setter. Takes the render lock; PROP_EMBEDDEDFONTS additionally
 * takes ass_mutex (nested inside the render lock) to push the new value
 * into the libass library immediately. */
static void
gst_ass_render_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAssRender *render = GST_ASS_RENDER (object);

  GST_ASS_RENDER_LOCK (render);
  switch (prop_id) {
    case PROP_ENABLE:
      render->enable = g_value_get_boolean (value);
      break;
    case PROP_EMBEDDEDFONTS:
      render->embeddedfonts = g_value_get_boolean (value);
      g_mutex_lock (&render->ass_mutex);
      ass_set_extract_fonts (render->ass_library, render->embeddedfonts);
      g_mutex_unlock (&render->ass_mutex);
      break;
    case PROP_WAIT_TEXT:
      render->wait_text = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_ASS_RENDER_UNLOCK (render);
}
337 
338 static void
gst_ass_render_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)339 gst_ass_render_get_property (GObject * object, guint prop_id,
340     GValue * value, GParamSpec * pspec)
341 {
342   GstAssRender *render = GST_ASS_RENDER (object);
343 
344   GST_ASS_RENDER_LOCK (render);
345   switch (prop_id) {
346     case PROP_ENABLE:
347       g_value_set_boolean (value, render->enable);
348       break;
349     case PROP_EMBEDDEDFONTS:
350       g_value_set_boolean (value, render->embeddedfonts);
351       break;
352     case PROP_WAIT_TEXT:
353       g_value_set_boolean (value, render->wait_text);
354       break;
355     default:
356       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
357       break;
358   }
359   GST_ASS_RENDER_UNLOCK (render);
360 }
361 
362 /* Called with lock held */
363 static void
gst_ass_render_pop_text(GstAssRender * render)364 gst_ass_render_pop_text (GstAssRender * render)
365 {
366   while (render->subtitle_pending) {
367     GST_DEBUG_OBJECT (render, "releasing text buffer %p",
368         render->subtitle_pending->data);
369     gst_buffer_unref (render->subtitle_pending->data);
370     render->subtitle_pending =
371         g_slist_delete_link (render->subtitle_pending,
372         render->subtitle_pending);
373   }
374 
375   /* Let the text task know we used that buffer */
376   GST_ASS_RENDER_BROADCAST (render);
377 }
378 
/* State-change vfunc.  Before chaining up on PAUSED->READY, both streaming
 * threads are flagged as flushing and woken up (pop_text broadcasts the
 * cond) so they can exit; after chaining up, libass state is torn down
 * (downward) or flushing/EOS/segment state is reset (upward). */
static GstStateChangeReturn
gst_ass_render_change_state (GstElement * element, GstStateChange transition)
{
  GstAssRender *render = GST_ASS_RENDER (element);
  GstStateChangeReturn ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* Unblock anything waiting on the render cond before shutdown. */
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_flushing = TRUE;
      render->video_flushing = TRUE;
      gst_ass_render_pop_text (render);
      GST_ASS_RENDER_UNLOCK (render);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* Free the subtitle track and cached composition under ass_mutex. */
      g_mutex_lock (&render->ass_mutex);
      if (render->ass_track)
        ass_free_track (render->ass_track);
      render->ass_track = NULL;
      render->track_init_ok = FALSE;
      render->renderer_init_ok = FALSE;
      gst_ass_render_reset_composition (render);
      g_mutex_unlock (&render->ass_mutex);
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Fresh start: clear flush/EOS flags and reset both segments. */
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_flushing = FALSE;
      render->video_flushing = FALSE;
      render->video_eos = FALSE;
      render->subtitle_eos = FALSE;
      gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
      gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
      GST_ASS_RENDER_UNLOCK (render);
      break;
    default:
      break;
  }


  return ret;
}
429 
430 static gboolean
gst_ass_render_query_src(GstPad * pad,GstObject * parent,GstQuery * query)431 gst_ass_render_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
432 {
433   gboolean res = FALSE;
434 
435   switch (GST_QUERY_TYPE (query)) {
436     case GST_QUERY_CAPS:
437     {
438       GstCaps *filter, *caps;
439 
440       gst_query_parse_caps (query, &filter);
441       caps = gst_ass_render_get_src_caps (pad, (GstAssRender *) parent, filter);
442       gst_query_set_caps_result (query, caps);
443       gst_caps_unref (caps);
444       res = TRUE;
445       break;
446     }
447     default:
448       res = gst_pad_query_default (pad, parent, query);
449       break;
450   }
451 
452   return res;
453 }
454 
/* Source-pad event handler: forwards upstream events to the video sink pad,
 * and additionally to the text sink pad once a subtitle track exists.
 * When pushing to both pads the event needs an extra ref for the first
 * push; the second push consumes the original reference. */
static gboolean
gst_ass_render_event_src (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstAssRender *render = GST_ASS_RENDER (parent);
  gboolean ret;

  GST_DEBUG_OBJECT (render, "received src event %" GST_PTR_FORMAT, event);

  /* FIXME: why not just always push it on text pad? */
  if (render->track_init_ok) {
    ret = gst_pad_push_event (render->video_sinkpad, gst_event_ref (event));
    gst_pad_push_event (render->text_sinkpad, event);
  } else {
    ret = gst_pad_push_event (render->video_sinkpad, event);
  }

  return ret;
}
473 
474 /**
475  * gst_ass_render_add_feature_and_intersect:
476  *
477  * Creates a new #GstCaps containing the (given caps +
478  * given caps feature) + (given caps intersected by the
479  * given filter).
480  *
481  * Returns: the new #GstCaps
482  */
483 static GstCaps *
gst_ass_render_add_feature_and_intersect(GstCaps * caps,const gchar * feature,GstCaps * filter)484 gst_ass_render_add_feature_and_intersect (GstCaps * caps,
485     const gchar * feature, GstCaps * filter)
486 {
487   int i, caps_size;
488   GstCaps *new_caps;
489 
490   new_caps = gst_caps_copy (caps);
491 
492   caps_size = gst_caps_get_size (new_caps);
493   for (i = 0; i < caps_size; i++) {
494     GstCapsFeatures *features = gst_caps_get_features (new_caps, i);
495     if (!gst_caps_features_is_any (features)) {
496       gst_caps_features_add (features, feature);
497     }
498   }
499 
500   gst_caps_append (new_caps, gst_caps_intersect_full (caps,
501           filter, GST_CAPS_INTERSECT_FIRST));
502 
503   return new_caps;
504 }
505 
506 /**
507  * gst_ass_render_intersect_by_feature:
508  *
509  * Creates a new #GstCaps based on the following filtering rule.
510  *
511  * For each individual caps contained in given caps, if the
512  * caps uses the given caps feature, keep a version of the caps
513  * with the feature and an another one without. Otherwise, intersect
514  * the caps with the given filter.
515  *
516  * Returns: the new #GstCaps
517  */
518 static GstCaps *
gst_ass_render_intersect_by_feature(GstCaps * caps,const gchar * feature,GstCaps * filter)519 gst_ass_render_intersect_by_feature (GstCaps * caps,
520     const gchar * feature, GstCaps * filter)
521 {
522   int i, caps_size;
523   GstCaps *new_caps;
524 
525   new_caps = gst_caps_new_empty ();
526 
527   caps_size = gst_caps_get_size (caps);
528   for (i = 0; i < caps_size; i++) {
529     GstStructure *caps_structure = gst_caps_get_structure (caps, i);
530     GstCapsFeatures *caps_features =
531         gst_caps_features_copy (gst_caps_get_features (caps, i));
532     GstCaps *filtered_caps;
533     GstCaps *simple_caps =
534         gst_caps_new_full (gst_structure_copy (caps_structure), NULL);
535     gst_caps_set_features (simple_caps, 0, caps_features);
536 
537     if (gst_caps_features_contains (caps_features, feature)) {
538       gst_caps_append (new_caps, gst_caps_copy (simple_caps));
539 
540       gst_caps_features_remove (caps_features, feature);
541       filtered_caps = gst_caps_ref (simple_caps);
542     } else {
543       filtered_caps = gst_caps_intersect_full (simple_caps, filter,
544           GST_CAPS_INTERSECT_FIRST);
545     }
546 
547     gst_caps_unref (simple_caps);
548     gst_caps_append (new_caps, filtered_caps);
549   }
550 
551   return new_caps;
552 }
553 
/* Computes the caps the video sink pad can accept by querying the source
 * pad's peer, handling the overlay-composition caps feature in both the
 * filter (downstream direction) and the returned caps. */
static GstCaps *
gst_ass_render_get_videosink_caps (GstPad * pad, GstAssRender * render,
    GstCaps * filter)
{
  GstPad *srcpad = render->srcpad;
  GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;

  if (filter) {
    /* filter caps + composition feature + filter caps
     * filtered by the software caps. */
    GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
    assrender_filter = gst_ass_render_add_feature_and_intersect (filter,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
    gst_caps_unref (sw_caps);

    GST_DEBUG_OBJECT (render, "assrender filter %" GST_PTR_FORMAT,
        assrender_filter);
  }

  peer_caps = gst_pad_peer_query_caps (srcpad, assrender_filter);

  if (assrender_filter)
    gst_caps_unref (assrender_filter);

  if (peer_caps) {

    GST_DEBUG_OBJECT (pad, "peer caps  %" GST_PTR_FORMAT, peer_caps);

    if (gst_caps_is_any (peer_caps)) {

      /* if peer returns ANY caps, return filtered src pad template caps */
      caps = gst_caps_copy (gst_pad_get_pad_template_caps (srcpad));
    } else {

      /* duplicate caps which contains the composition into one version with
       * the meta and one without. Filter the other caps by the software caps */
      GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
      caps = gst_ass_render_intersect_by_feature (peer_caps,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
      gst_caps_unref (sw_caps);
    }

    gst_caps_unref (peer_caps);

  } else {
    /* no peer, our padtemplate is enough then */
    caps = gst_pad_get_pad_template_caps (pad);
  }

  /* Finally restrict the result to the caller's filter, if any. */
  if (filter) {
    GstCaps *intersection = gst_caps_intersect_full (filter, caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = intersection;
  }

  GST_DEBUG_OBJECT (render, "returning  %" GST_PTR_FORMAT, caps);

  return caps;
}
614 
/* Computes the caps the source pad can produce by querying the video sink
 * pad's peer; mirror image of gst_ass_render_get_videosink_caps() (the
 * feature-handling helpers are applied in the opposite order). */
static GstCaps *
gst_ass_render_get_src_caps (GstPad * pad, GstAssRender * render,
    GstCaps * filter)
{
  GstPad *sinkpad = render->video_sinkpad;
  GstCaps *peer_caps = NULL, *caps = NULL, *assrender_filter = NULL;

  if (filter) {
    /* duplicate filter caps which contains the composition into one version
     * with the meta and one without. Filter the other caps by the software
     * caps */
    GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
    assrender_filter =
        gst_ass_render_intersect_by_feature (filter,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
    gst_caps_unref (sw_caps);
  }

  peer_caps = gst_pad_peer_query_caps (sinkpad, assrender_filter);

  if (assrender_filter)
    gst_caps_unref (assrender_filter);

  if (peer_caps) {

    GST_DEBUG_OBJECT (pad, "peer caps  %" GST_PTR_FORMAT, peer_caps);

    if (gst_caps_is_any (peer_caps)) {

      /* if peer returns ANY caps, return filtered sink pad template caps */
      caps = gst_caps_copy (gst_pad_get_pad_template_caps (sinkpad));

    } else {

      /* return upstream caps + composition feature + upstream caps
       * filtered by the software caps. */
      GstCaps *sw_caps = gst_static_caps_get (&sw_template_caps);
      caps = gst_ass_render_add_feature_and_intersect (peer_caps,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, sw_caps);
      gst_caps_unref (sw_caps);
    }

    gst_caps_unref (peer_caps);

  } else {
    /* no peer, our padtemplate is enough then */
    caps = gst_pad_get_pad_template_caps (pad);
  }

  /* Finally restrict the result to the caller's filter, if any. */
  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = intersection;
  }

  GST_DEBUG_OBJECT (render, "returning  %" GST_PTR_FORMAT, caps);

  return caps;
}
677 
/* Composites the linked list of libass images into a premultiplied BGRA
 * buffer of the given width/height/stride, offset by (x_off, y_off).
 * The buffer is zeroed first, then each glyph bitmap (8-bit coverage) is
 * alpha-blended on top using its packed RGBA colour (ass_image->color has
 * R in the top byte and "transparency" in the low byte, hence the
 * 255 - (color & 0xff) alpha below).
 * NOTE(review): dst_x/dst_y are clamped only on the high side; this
 * presumably relies on libass never producing negative dst_x/dst_y for a
 * frame-sized render — confirm, otherwise negative offsets would index
 * before the buffer. */
static void
blit_bgra_premultiplied (GstAssRender * render, ASS_Image * ass_image,
    guint8 * data, gint width, gint height, gint stride, gint x_off, gint y_off)
{
  guint counter = 0;
  gint alpha, r, g, b, k;
  const guint8 *src;
  guint8 *dst;
  gint x, y, w, h;
  gint dst_skip;
  gint src_skip;
  gint dst_x, dst_y;

  /* Start from a fully transparent canvas. */
  memset (data, 0, stride * height);

  while (ass_image) {
    dst_x = ass_image->dst_x + x_off;
    dst_y = ass_image->dst_y + y_off;

    /* Clip the glyph rectangle against the right/bottom edges. */
    w = MIN (ass_image->w, width - dst_x);
    h = MIN (ass_image->h, height - dst_y);
    if (w <= 0 || h <= 0)
      goto next;

    /* Low byte of color is transparency: 0xff means fully transparent. */
    alpha = 255 - (ass_image->color & 0xff);
    if (!alpha)
      goto next;

    r = ((ass_image->color) >> 24) & 0xff;
    g = ((ass_image->color) >> 16) & 0xff;
    b = ((ass_image->color) >> 8) & 0xff;

    src = ass_image->bitmap;
    dst = data + dst_y * stride + dst_x * 4;

    /* Per-row jumps from the end of the clipped span to the next row. */
    src_skip = ass_image->stride - w;
    dst_skip = stride - w * 4;

    for (y = 0; y < h; y++) {
      for (x = 0; x < w; x++) {
        if (src[0]) {
          /* k = effective per-pixel alpha (coverage * layer alpha). */
          k = src[0] * alpha / 255;
          if (dst[3] == 0) {
            /* Empty destination: write premultiplied colour directly. */
            dst[3] = k;
            dst[2] = (k * r) / 255;
            dst[1] = (k * g) / 255;
            dst[0] = (k * b) / 255;
          } else {
            /* "Over" blend onto the existing premultiplied pixel. */
            dst[3] = k + (255 - k) * dst[3] / 255;
            dst[2] = (k * r + (255 - k) * dst[2]) / 255;
            dst[1] = (k * g + (255 - k) * dst[1]) / 255;
            dst[0] = (k * b + (255 - k) * dst[0]) / 255;
          }
        }
        src++;
        dst += 4;
      }
      src += src_skip;
      dst += dst_skip;
    }
  next:
    counter++;
    ass_image = ass_image->next;
  }
  GST_LOG_OBJECT (render, "amount of rendered ass_image: %u", counter);
}
744 
745 static gboolean
gst_ass_render_can_handle_caps(GstCaps * incaps)746 gst_ass_render_can_handle_caps (GstCaps * incaps)
747 {
748   static GstStaticCaps static_caps = GST_STATIC_CAPS (ASSRENDER_CAPS);
749   gboolean ret;
750   GstCaps *caps;
751 
752   caps = gst_static_caps_get (&static_caps);
753   ret = gst_caps_is_subset (incaps, caps);
754   gst_caps_unref (caps);
755 
756   return ret;
757 }
758 
/* Recomputes ass_frame_width/height: the largest rectangle with the video's
 * aspect ratio that fits inside the current window size.
 * NOTE(review): divides by info.height and window_height — presumably both
 * are non-zero whenever this is called (negotiate asserts the window size);
 * confirm for the caps-set path. */
static void
gst_ass_render_update_render_size (GstAssRender * render)
{
  gdouble video_aspect = (gdouble) render->info.width /
      (gdouble) render->info.height;
  gdouble window_aspect = (gdouble) render->window_width /
      (gdouble) render->window_height;

  /* render at the window size, with the video aspect ratio */
  if (video_aspect >= window_aspect) {
    /* Video is wider than the window: pin width, derive height. */
    render->ass_frame_width = render->window_width;
    render->ass_frame_height = render->window_width / video_aspect;
  } else {
    /* Window is wider than the video: pin height, derive width. */
    render->ass_frame_width = render->window_height * video_aspect;
    render->ass_frame_height = render->window_height;
  }
}
776 
777 static gboolean
gst_ass_render_negotiate(GstAssRender * render,GstCaps * caps)778 gst_ass_render_negotiate (GstAssRender * render, GstCaps * caps)
779 {
780   gboolean upstream_has_meta = FALSE;
781   gboolean caps_has_meta = FALSE;
782   gboolean alloc_has_meta = FALSE;
783   gboolean attach = FALSE;
784   gboolean ret = TRUE;
785   guint width, height;
786   GstCapsFeatures *f;
787   GstCaps *overlay_caps;
788   GstQuery *query;
789   guint alloc_index;
790 
791   GST_DEBUG_OBJECT (render, "performing negotiation");
792 
793   /* Clear cached composition */
794   gst_ass_render_reset_composition (render);
795 
796   /* Clear any pending reconfigure flag */
797   gst_pad_check_reconfigure (render->srcpad);
798 
799   if (!caps)
800     caps = gst_pad_get_current_caps (render->video_sinkpad);
801   else
802     gst_caps_ref (caps);
803 
804   if (!caps || gst_caps_is_empty (caps))
805     goto no_format;
806 
807   /* Check if upstream caps have meta */
808   if ((f = gst_caps_get_features (caps, 0))) {
809     upstream_has_meta = gst_caps_features_contains (f,
810         GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
811   }
812 
813   /* Initialize dimensions */
814   width = render->info.width;
815   height = render->info.height;
816 
817   if (upstream_has_meta) {
818     overlay_caps = gst_caps_ref (caps);
819   } else {
820     GstCaps *peercaps;
821 
822     /* BaseTransform requires caps for the allocation query to work */
823     overlay_caps = gst_caps_copy (caps);
824     f = gst_caps_get_features (overlay_caps, 0);
825     gst_caps_features_add (f,
826         GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
827 
828     /* Then check if downstream accept overlay composition in caps */
829     /* FIXME: We should probably check if downstream *prefers* the
830      * overlay meta, and only enforce usage of it if we can't handle
831      * the format ourselves and thus would have to drop the overlays.
832      * Otherwise we should prefer what downstream wants here.
833      */
834     peercaps = gst_pad_peer_query_caps (render->srcpad, NULL);
835     caps_has_meta = gst_caps_can_intersect (peercaps, overlay_caps);
836     gst_caps_unref (peercaps);
837 
838     GST_DEBUG ("caps have overlay meta %d", caps_has_meta);
839   }
840 
841   if (upstream_has_meta || caps_has_meta) {
842     /* Send caps immediately, it's needed by GstBaseTransform to get a reply
843      * from allocation query */
844     ret = gst_pad_set_caps (render->srcpad, overlay_caps);
845 
846     /* First check if the allocation meta has compositon */
847     query = gst_query_new_allocation (overlay_caps, FALSE);
848 
849     if (!gst_pad_peer_query (render->srcpad, query)) {
850       /* no problem, we use the query defaults */
851       GST_DEBUG_OBJECT (render, "ALLOCATION query failed");
852 
853       /* In case we were flushing, mark reconfigure and fail this method,
854        * will make it retry */
855       if (render->video_flushing)
856         ret = FALSE;
857     }
858 
859     alloc_has_meta = gst_query_find_allocation_meta (query,
860         GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);
861 
862     GST_DEBUG ("sink alloc has overlay meta %d", alloc_has_meta);
863 
864     if (alloc_has_meta) {
865       const GstStructure *params;
866 
867       gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
868       if (params) {
869         if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
870                 "height", G_TYPE_UINT, &height, NULL)) {
871           GST_DEBUG ("received window size: %dx%d", width, height);
872           g_assert (width != 0 && height != 0);
873         }
874       }
875     }
876 
877     gst_query_unref (query);
878   }
879 
880   /* Update render size if needed */
881   render->window_width = width;
882   render->window_height = height;
883   gst_ass_render_update_render_size (render);
884 
885   /* For backward compatibility, we will prefer bliting if downstream
886    * allocation does not support the meta. In other case we will prefer
887    * attaching, and will fail the negotiation in the unlikely case we are
888    * force to blit, but format isn't supported. */
889 
890   if (upstream_has_meta) {
891     attach = TRUE;
892   } else if (caps_has_meta) {
893     if (alloc_has_meta) {
894       attach = TRUE;
895     } else {
896       /* Don't attach unless we cannot handle the format */
897       attach = !gst_ass_render_can_handle_caps (caps);
898     }
899   } else {
900     ret = gst_ass_render_can_handle_caps (caps);
901   }
902 
903   /* If we attach, then pick the overlay caps */
904   if (attach) {
905     GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, overlay_caps);
906     /* Caps where already sent */
907   } else if (ret) {
908     GST_DEBUG_OBJECT (render, "Using caps %" GST_PTR_FORMAT, caps);
909     ret = gst_pad_set_caps (render->srcpad, caps);
910   }
911 
912   render->attach_compo_to_buffer = attach;
913 
914   if (!ret) {
915     GST_DEBUG_OBJECT (render, "negotiation failed, schedule reconfigure");
916     gst_pad_mark_reconfigure (render->srcpad);
917   } else {
918     g_mutex_lock (&render->ass_mutex);
919     ass_set_frame_size (render->ass_renderer,
920         render->ass_frame_width, render->ass_frame_height);
921     ass_set_storage_size (render->ass_renderer,
922         render->info.width, render->info.height);
923     ass_set_pixel_aspect (render->ass_renderer,
924         (gdouble) render->info.par_n / (gdouble) render->info.par_d);
925     ass_set_font_scale (render->ass_renderer, 1.0);
926     ass_set_hinting (render->ass_renderer, ASS_HINTING_NONE);
927 
928     ass_set_fonts (render->ass_renderer, "Arial", "sans-serif", 1, NULL, 1);
929     ass_set_fonts (render->ass_renderer, NULL, "Sans", 1, NULL, 1);
930     ass_set_margins (render->ass_renderer, 0, 0, 0, 0);
931     ass_set_use_margins (render->ass_renderer, 0);
932     g_mutex_unlock (&render->ass_mutex);
933 
934     render->renderer_init_ok = TRUE;
935 
936     GST_DEBUG_OBJECT (render, "ass renderer setup complete");
937   }
938 
939   gst_caps_unref (overlay_caps);
940   gst_caps_unref (caps);
941 
942   if (!ret)
943     gst_pad_mark_reconfigure (render->srcpad);
944 
945   return ret;
946 
947 no_format:
948   {
949     if (caps)
950       gst_caps_unref (caps);
951     gst_pad_mark_reconfigure (render->srcpad);
952     return FALSE;
953   }
954 }
955 
956 static gboolean
gst_ass_render_setcaps_video(GstPad * pad,GstAssRender * render,GstCaps * caps)957 gst_ass_render_setcaps_video (GstPad * pad, GstAssRender * render,
958     GstCaps * caps)
959 {
960   GstVideoInfo info;
961   gboolean ret;
962 
963   if (!gst_video_info_from_caps (&info, caps))
964     goto invalid_caps;
965 
966   render->info = info;
967 
968   ret = gst_ass_render_negotiate (render, caps);
969 
970   GST_ASS_RENDER_LOCK (render);
971 
972   if (!render->attach_compo_to_buffer && !gst_ass_render_can_handle_caps (caps)) {
973     GST_DEBUG_OBJECT (render, "unsupported caps %" GST_PTR_FORMAT, caps);
974     ret = FALSE;
975   }
976   GST_ASS_RENDER_UNLOCK (render);
977 
978   return ret;
979 
980   /* ERRORS */
981 invalid_caps:
982   {
983     GST_ERROR_OBJECT (render, "could not parse caps");
984     return FALSE;
985   }
986 }
987 
988 static gboolean
gst_ass_render_setcaps_text(GstPad * pad,GstAssRender * render,GstCaps * caps)989 gst_ass_render_setcaps_text (GstPad * pad, GstAssRender * render,
990     GstCaps * caps)
991 {
992   GstStructure *structure;
993   const GValue *value;
994   GstBuffer *priv;
995   GstMapInfo map;
996   gboolean ret = FALSE;
997 
998   structure = gst_caps_get_structure (caps, 0);
999 
1000   GST_DEBUG_OBJECT (render, "text pad linked with caps:  %" GST_PTR_FORMAT,
1001       caps);
1002 
1003   value = gst_structure_get_value (structure, "codec_data");
1004 
1005   g_mutex_lock (&render->ass_mutex);
1006   if (value != NULL) {
1007     priv = gst_value_get_buffer (value);
1008     g_return_val_if_fail (priv != NULL, FALSE);
1009 
1010     gst_buffer_map (priv, &map, GST_MAP_READ);
1011 
1012     if (!render->ass_track)
1013       render->ass_track = ass_new_track (render->ass_library);
1014 
1015     ass_process_codec_private (render->ass_track, (char *) map.data, map.size);
1016 
1017     gst_buffer_unmap (priv, &map);
1018 
1019     GST_DEBUG_OBJECT (render, "ass track created");
1020 
1021     render->track_init_ok = TRUE;
1022 
1023     ret = TRUE;
1024   } else if (!render->ass_track) {
1025     render->ass_track = ass_new_track (render->ass_library);
1026 
1027     render->track_init_ok = TRUE;
1028 
1029     ret = TRUE;
1030   }
1031   g_mutex_unlock (&render->ass_mutex);
1032 
1033   return ret;
1034 }
1035 
1036 
1037 static void
gst_ass_render_process_text(GstAssRender * render,GstBuffer * buffer,GstClockTime running_time,GstClockTime duration)1038 gst_ass_render_process_text (GstAssRender * render, GstBuffer * buffer,
1039     GstClockTime running_time, GstClockTime duration)
1040 {
1041   GstMapInfo map;
1042   gdouble pts_start, pts_end;
1043 
1044   pts_start = running_time;
1045   pts_start /= GST_MSECOND;
1046   pts_end = duration;
1047   pts_end /= GST_MSECOND;
1048 
1049   GST_DEBUG_OBJECT (render,
1050       "Processing subtitles with running time %" GST_TIME_FORMAT
1051       " and duration %" GST_TIME_FORMAT, GST_TIME_ARGS (running_time),
1052       GST_TIME_ARGS (duration));
1053 
1054   gst_buffer_map (buffer, &map, GST_MAP_READ);
1055 
1056   g_mutex_lock (&render->ass_mutex);
1057   ass_process_chunk (render->ass_track, (gchar *) map.data, map.size,
1058       pts_start, pts_end);
1059   g_mutex_unlock (&render->ass_mutex);
1060 
1061   gst_buffer_unmap (buffer, &map);
1062 }
1063 
1064 static GstVideoOverlayComposition *
gst_ass_render_composite_overlay(GstAssRender * render,ASS_Image * images)1065 gst_ass_render_composite_overlay (GstAssRender * render, ASS_Image * images)
1066 {
1067   GstVideoOverlayComposition *composition;
1068   GstVideoOverlayRectangle *rectangle;
1069   GstVideoMeta *vmeta;
1070   GstMapInfo map;
1071   GstBuffer *buffer;
1072   ASS_Image *image;
1073   gint min_x, min_y;
1074   gint max_x, max_y;
1075   gint width, height;
1076   gint stride;
1077   gdouble hscale, vscale;
1078   gpointer data;
1079 
1080   min_x = G_MAXINT;
1081   min_y = G_MAXINT;
1082   max_x = 0;
1083   max_y = 0;
1084 
1085   /* find bounding box of all images, to limit the overlay rectangle size */
1086   for (image = images; image; image = image->next) {
1087     if (min_x > image->dst_x)
1088       min_x = image->dst_x;
1089     if (min_y > image->dst_y)
1090       min_y = image->dst_y;
1091     if (max_x < image->dst_x + image->w)
1092       max_x = image->dst_x + image->w;
1093     if (max_y < image->dst_y + image->h)
1094       max_y = image->dst_y + image->h;
1095   }
1096 
1097   width = MIN (max_x - min_x, render->ass_frame_width);
1098   height = MIN (max_y - min_y, render->ass_frame_height);
1099 
1100   GST_DEBUG_OBJECT (render, "render overlay rectangle %dx%d%+d%+d",
1101       width, height, min_x, min_y);
1102 
1103   buffer = gst_buffer_new_and_alloc (4 * width * height);
1104   if (!buffer) {
1105     GST_ERROR_OBJECT (render, "Failed to allocate overlay buffer");
1106     return NULL;
1107   }
1108 
1109   vmeta = gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
1110       GST_VIDEO_OVERLAY_COMPOSITION_FORMAT_RGB, width, height);
1111 
1112   if (!gst_video_meta_map (vmeta, 0, &map, &data, &stride, GST_MAP_READWRITE)) {
1113     GST_ERROR_OBJECT (render, "Failed to map overlay buffer");
1114     gst_buffer_unref (buffer);
1115     return NULL;
1116   }
1117 
1118   blit_bgra_premultiplied (render, images, data, width, height, stride,
1119       -min_x, -min_y);
1120   gst_video_meta_unmap (vmeta, 0, &map);
1121 
1122   hscale = (gdouble) render->info.width / (gdouble) render->ass_frame_width;
1123   vscale = (gdouble) render->info.height / (gdouble) render->ass_frame_height;
1124 
1125   rectangle = gst_video_overlay_rectangle_new_raw (buffer,
1126       hscale * min_x, vscale * min_y, hscale * width, vscale * height,
1127       GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA);
1128 
1129   gst_buffer_unref (buffer);
1130 
1131   composition = gst_video_overlay_composition_new (rectangle);
1132   gst_video_overlay_rectangle_unref (rectangle);
1133 
1134   return composition;
1135 }
1136 
1137 static gboolean
gst_ass_render_push_frame(GstAssRender * render,GstBuffer * video_frame)1138 gst_ass_render_push_frame (GstAssRender * render, GstBuffer * video_frame)
1139 {
1140   GstVideoFrame frame;
1141 
1142   if (!render->composition)
1143     goto done;
1144 
1145   video_frame = gst_buffer_make_writable (video_frame);
1146 
1147   if (render->attach_compo_to_buffer) {
1148     gst_buffer_add_video_overlay_composition_meta (video_frame,
1149         render->composition);
1150     goto done;
1151   }
1152 
1153   if (!gst_video_frame_map (&frame, &render->info, video_frame,
1154           GST_MAP_READWRITE)) {
1155     GST_WARNING_OBJECT (render, "failed to map video frame for blending");
1156     goto done;
1157   }
1158 
1159   gst_video_overlay_composition_blend (render->composition, &frame);
1160   gst_video_frame_unmap (&frame);
1161 
1162 done:
1163   return gst_pad_push (render->srcpad, video_frame);
1164 }
1165 
/* Video sink pad chain function.
 *
 * Clips the incoming buffer to the video segment, feeds any queued
 * subtitle buffers whose running time overlaps the frame to libass,
 * renders/attaches the resulting overlay and pushes the frame
 * downstream.  When wait-text is enabled this may block on the render
 * condition until a subtitle buffer (or flush/EOS) arrives.
 *
 * Returns a GstFlowReturn; out-of-segment and missing-timestamp
 * buffers are silently dropped with GST_FLOW_OK. */
static GstFlowReturn
gst_ass_render_chain_video (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstAssRender *render = GST_ASS_RENDER (parent);
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean in_seg = FALSE;
  guint64 start, stop, clip_start = 0, clip_stop = 0;
  ASS_Image *ass_image;
  guint n = 0;

  /* Renegotiate first if the src pad was marked for reconfiguration */
  if (gst_pad_check_reconfigure (render->srcpad)) {
    if (!gst_ass_render_negotiate (render, NULL)) {
      gst_pad_mark_reconfigure (render->srcpad);
      if (GST_PAD_IS_FLUSHING (render->srcpad))
        goto flushing_no_unlock;
      else
        goto not_negotiated;
    }
  }

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
    goto missing_timestamp;

  /* ignore buffers that are outside of the current segment */
  start = GST_BUFFER_TIMESTAMP (buffer);

  if (!GST_BUFFER_DURATION_IS_VALID (buffer)) {
    stop = GST_CLOCK_TIME_NONE;
  } else {
    stop = start + GST_BUFFER_DURATION (buffer);
  }

  /* segment_clip() will adjust start unconditionally to segment_start if
   * no stop time is provided, so handle this ourselves */
  if (stop == GST_CLOCK_TIME_NONE && start < render->video_segment.start)
    goto out_of_segment;

  in_seg =
      gst_segment_clip (&render->video_segment, GST_FORMAT_TIME, start, stop,
      &clip_start, &clip_stop);

  if (!in_seg)
    goto out_of_segment;

  /* if the buffer is only partially in the segment, fix up stamps */
  if (clip_start != start || (stop != -1 && clip_stop != stop)) {
    GST_DEBUG_OBJECT (render, "clipping buffer timestamp/duration to segment");
    buffer = gst_buffer_make_writable (buffer);
    GST_BUFFER_TIMESTAMP (buffer) = clip_start;
    if (stop != -1)
      GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
  }

  /* now, after we've done the clipping, fix up end time if there's no
   * duration (we only use those estimated values internally though, we
   * don't want to set bogus values on the buffer itself) */
  if (stop == -1) {
    if (render->info.fps_n && render->info.fps_d) {
      GST_DEBUG_OBJECT (render, "estimating duration based on framerate");
      stop =
          start + gst_util_uint64_scale_int (GST_SECOND, render->info.fps_d,
          render->info.fps_n);
    } else {
      GST_WARNING_OBJECT (render, "no duration, assuming minimal duration");
      stop = start + 1;         /* we need to assume some interval */
    }
  }

  /* Re-entered after GST_ASS_RENDER_WAIT below when we were woken up by
   * the text chain / events; flushing and EOS are re-checked each time */
wait_for_text_buf:

  GST_ASS_RENDER_LOCK (render);

  if (render->video_flushing)
    goto flushing;

  if (render->video_eos)
    goto have_eos;

  if (render->renderer_init_ok && render->track_init_ok && render->enable) {
    /* Text pad linked, check if we have a text buffer queued */
    if (render->subtitle_pending) {
      GSList *subtitle_pending = render->subtitle_pending;
      GstClockTime text_start = GST_CLOCK_TIME_NONE;
      GstClockTime text_end = GST_CLOCK_TIME_NONE;
      GstClockTime text_running_time = GST_CLOCK_TIME_NONE;
      GstClockTime text_running_time_end = GST_CLOCK_TIME_NONE;
      GstClockTime vid_running_time, vid_running_time_end;
      gdouble timestamp;
      gint changed = 0;

      vid_running_time =
          gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
          start);
      vid_running_time_end =
          gst_segment_to_running_time (&render->video_segment, GST_FORMAT_TIME,
          stop);

      GST_LOG_OBJECT (render, "V : %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
          GST_TIME_ARGS (vid_running_time),
          GST_TIME_ARGS (vid_running_time_end));

      if (subtitle_pending == NULL)
        GST_LOG_OBJECT (render, "T : no pending subtitles");

      /* First pass over the pending list (under the render lock):
       * drop stale/badly-stamped buffers and feed the rest to libass */
      while (subtitle_pending != NULL) {
        ++n;

        /* if the text buffer isn't stamped right, pop it off the
         * queue and display it for the current video frame only */
        if (!GST_BUFFER_TIMESTAMP_IS_VALID (subtitle_pending->data) ||
            !GST_BUFFER_DURATION_IS_VALID (subtitle_pending->data)) {
          GSList *bad = subtitle_pending;
          GST_WARNING_OBJECT (render,
              "Got text buffer with invalid timestamp or duration %"
              GST_PTR_FORMAT, bad->data);
          gst_buffer_unref (bad->data);
          subtitle_pending = bad->next;
          render->subtitle_pending =
              g_slist_delete_link (render->subtitle_pending, bad);
          /* wake the text chain in case it waits for queue space */
          GST_ASS_RENDER_BROADCAST (render);
          continue;
        }

        text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
        text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);

        /* If timestamp and duration are valid */
        text_running_time =
            gst_segment_to_running_time (&render->subtitle_segment,
            GST_FORMAT_TIME, text_start);
        text_running_time_end =
            gst_segment_to_running_time (&render->subtitle_segment,
            GST_FORMAT_TIME, text_end);

        GST_LOG_OBJECT (render, "T%u: %" GST_TIME_FORMAT " - "
            "%" GST_TIME_FORMAT, n, GST_TIME_ARGS (text_running_time),
            GST_TIME_ARGS (text_running_time_end));

        /* Text too old */
        if (text_running_time_end <= vid_running_time) {
          GSList *old = subtitle_pending;
          GST_DEBUG_OBJECT (render,
              "text buffer too old, popping %" GST_PTR_FORMAT, old->data);
          gst_buffer_unref (old->data);
          subtitle_pending = old->next;
          render->subtitle_pending =
              g_slist_delete_link (render->subtitle_pending, old);
          GST_ASS_RENDER_BROADCAST (render);
          continue;
        }

        /* need_process is set when a new buffer arrived, so each chunk
         * is only fed to libass once */
        if (render->need_process) {
          GST_DEBUG_OBJECT (render, "process text buffer");
          gst_ass_render_process_text (render, subtitle_pending->data,
              text_running_time, text_running_time_end - text_running_time);
        }

        subtitle_pending = subtitle_pending->next;
      }

      if (render->need_process) {
        render->need_process = FALSE;
      }

      GST_ASS_RENDER_UNLOCK (render);

      /* libass needs timestamps in ms */
      timestamp = vid_running_time / GST_MSECOND;

      g_mutex_lock (&render->ass_mutex);
      ass_image = ass_render_frame (render->ass_renderer, render->ass_track,
          timestamp, &changed);
      g_mutex_unlock (&render->ass_mutex);

      /* Drop the cached composition when libass reports a change or no
       * longer produces any image */
      if ((!ass_image || changed) && render->composition) {
        GST_DEBUG_OBJECT (render, "release overlay (changed %d)", changed);
        gst_ass_render_reset_composition (render);
      }

      if (ass_image != NULL) {
        if (!render->composition)
          render->composition = gst_ass_render_composite_overlay (render,
              ass_image);
      } else {
        GST_DEBUG_OBJECT (render, "nothing to render right now");
      }

      /* Push the video frame */
      ret = gst_ass_render_push_frame (render, buffer);

      /* Second pass: pop buffers that finished within this frame */
      subtitle_pending = render->subtitle_pending;
      while (subtitle_pending != NULL) {

        text_start = GST_BUFFER_TIMESTAMP (subtitle_pending->data);
        text_end = text_start + GST_BUFFER_DURATION (subtitle_pending->data);

        text_running_time_end =
            gst_segment_to_running_time (&render->video_segment,
            GST_FORMAT_TIME, text_end);

        if (text_running_time_end <= vid_running_time_end) {
          GSList *old = subtitle_pending;
          GST_DEBUG_OBJECT (render,
              "finished text buffer, popping %" GST_PTR_FORMAT, old->data);
          GST_ASS_RENDER_LOCK (render);
          gst_buffer_unref (old->data);
          subtitle_pending = old->next;
          render->subtitle_pending =
              g_slist_delete_link (render->subtitle_pending, old);
          GST_ASS_RENDER_BROADCAST (render);
          GST_ASS_RENDER_UNLOCK (render);
          /* remaining buffers must be re-fed to libass next frame */
          render->need_process = TRUE;
          if (g_slist_length (render->subtitle_pending) == 0) {
            render->need_process = FALSE;
          }
        } else {
          subtitle_pending = subtitle_pending->next;
        }
      }
    } else {
      gboolean wait_for_text_buf = TRUE;

      if (render->subtitle_eos)
        wait_for_text_buf = FALSE;

      if (!render->wait_text)
        wait_for_text_buf = FALSE;

      /* Text pad linked, but no text buffer available - what now? */
      if (render->subtitle_segment.format == GST_FORMAT_TIME) {
        GstClockTime text_start_running_time, text_last_stop_running_time;
        GstClockTime vid_running_time;

        vid_running_time =
            gst_segment_to_running_time (&render->video_segment,
            GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer));
        text_start_running_time =
            gst_segment_to_running_time (&render->subtitle_segment,
            GST_FORMAT_TIME, render->subtitle_segment.start);
        text_last_stop_running_time =
            gst_segment_to_running_time (&render->subtitle_segment,
            GST_FORMAT_TIME, render->subtitle_segment.position);

        /* Video is still before the subtitle stream's start/position:
         * no subtitle can apply yet, so don't block */
        if ((GST_CLOCK_TIME_IS_VALID (text_start_running_time) &&
                vid_running_time < text_start_running_time) ||
            (GST_CLOCK_TIME_IS_VALID (text_last_stop_running_time) &&
                vid_running_time < text_last_stop_running_time)) {
          wait_for_text_buf = FALSE;
        }
      }

      if (wait_for_text_buf) {
        GST_DEBUG_OBJECT (render, "no text buffer, need to wait for one");
        /* releases the render lock while waiting; woken by the text
         * chain, new segments, flush or EOS */
        GST_ASS_RENDER_WAIT (render);
        GST_DEBUG_OBJECT (render, "resuming");
        GST_ASS_RENDER_UNLOCK (render);
        goto wait_for_text_buf;
      } else {
        GST_ASS_RENDER_UNLOCK (render);
        GST_LOG_OBJECT (render, "no need to wait for a text buffer");
        ret = gst_pad_push (render->srcpad, buffer);
      }
    }
  } else {
    GST_LOG_OBJECT (render, "rendering disabled, doing buffer passthrough");

    GST_ASS_RENDER_UNLOCK (render);
    ret = gst_pad_push (render->srcpad, buffer);
    return ret;
  }

  GST_DEBUG_OBJECT (render, "leaving chain for buffer %p ret=%d", buffer, ret);

  /* Update last_stop */
  render->video_segment.position = clip_start;

  return ret;

  /* ERRORS */
missing_timestamp:
  {
    GST_WARNING_OBJECT (render, "buffer without timestamp, discarding");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
not_negotiated:
  {
    GST_DEBUG_OBJECT (render, "not negotiated");
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
flushing:
  {
    /* drops the render lock, then falls through to the common path */
    GST_ASS_RENDER_UNLOCK (render);
  }
flushing_no_unlock:
  {
    GST_DEBUG_OBJECT (render, "flushing, discarding buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_FLUSHING;
  }
have_eos:
  {
    GST_ASS_RENDER_UNLOCK (render);
    GST_DEBUG_OBJECT (render, "eos, discarding buffer");
    gst_buffer_unref (buffer);
    return GST_FLOW_EOS;
  }
out_of_segment:
  {
    GST_DEBUG_OBJECT (render, "buffer out of segment, discarding");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
1481 
/* Text sink pad chain function.
 *
 * Clips the incoming subtitle buffer to the subtitle segment and
 * appends a ref to the subtitle_pending queue for the video chain to
 * process; wakes up the video chain if it is waiting for text.
 *
 * Returns GST_FLOW_OK, or GST_FLOW_FLUSHING/GST_FLOW_EOS when the
 * subtitle stream is flushing or already at EOS. */
static GstFlowReturn
gst_ass_render_chain_text (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstAssRender *render = GST_ASS_RENDER (parent);
  gboolean in_seg = FALSE;
  guint64 clip_start = 0, clip_stop = 0;

  GST_DEBUG_OBJECT (render, "entering chain for buffer %" GST_PTR_FORMAT,
      buffer);

  GST_ASS_RENDER_LOCK (render);

  if (render->subtitle_flushing) {
    GST_ASS_RENDER_UNLOCK (render);
    ret = GST_FLOW_FLUSHING;
    GST_LOG_OBJECT (render, "text flushing");
    goto beach;
  }

  if (render->subtitle_eos) {
    GST_ASS_RENDER_UNLOCK (render);
    ret = GST_FLOW_EOS;
    GST_LOG_OBJECT (render, "text EOS");
    goto beach;
  }

  /* Clip against the subtitle segment; buffers without a timestamp are
   * accepted as-is */
  if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))) {
    GstClockTime stop;

    if (G_LIKELY (GST_BUFFER_DURATION_IS_VALID (buffer)))
      stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
    else
      stop = GST_CLOCK_TIME_NONE;

    in_seg = gst_segment_clip (&render->subtitle_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (buffer), stop, &clip_start, &clip_stop);
  } else {
    in_seg = TRUE;
  }

  if (in_seg) {
    /* NOTE(review): buffer metadata is written here without
     * gst_buffer_make_writable() — presumably upstream delivers
     * writable buffers; verify.  Also note the else-if: duration is
     * only adjusted when the timestamp is INVALID, so a buffer with
     * both valid keeps its unclipped duration — confirm intended. */
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      GST_BUFFER_TIMESTAMP (buffer) = clip_start;
    else if (GST_BUFFER_DURATION_IS_VALID (buffer))
      GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;

    if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
      render->subtitle_segment.position = clip_start;

    GST_DEBUG_OBJECT (render, "New buffer arrived %" GST_PTR_FORMAT, buffer);
    /* queue a ref; the video chain owns and eventually unrefs it */
    render->subtitle_pending = g_slist_append (render->subtitle_pending,
        gst_buffer_ref (buffer));
    render->need_process = TRUE;

    /* in case the video chain is waiting for a text buffer, wake it up */
    GST_ASS_RENDER_BROADCAST (render);
  }

  GST_ASS_RENDER_UNLOCK (render);

beach:
  GST_DEBUG_OBJECT (render, "leaving chain for buffer %p", buffer);

  gst_buffer_unref (buffer);
  return ret;
}
1549 
1550 static void
gst_ass_render_handle_tag_sample(GstAssRender * render,GstSample * sample)1551 gst_ass_render_handle_tag_sample (GstAssRender * render, GstSample * sample)
1552 {
1553   static const gchar *mimetypes[] = {
1554     "application/x-font-ttf",
1555     "application/x-font-otf",
1556     "application/x-truetype-font",
1557     "application/vnd.ms-opentype",
1558     "font/ttf",
1559     "font/otf",
1560     "font/sfnt",
1561     "font/collection"
1562   };
1563   static const gchar *extensions[] = {
1564     ".otf",
1565     ".ttf",
1566     ".ttc"
1567   };
1568 
1569   GstBuffer *buf;
1570   const GstStructure *structure;
1571   gboolean valid_mimetype, valid_extension;
1572   guint i;
1573   const gchar *mimetype, *filename;
1574 
1575   buf = gst_sample_get_buffer (sample);
1576   structure = gst_sample_get_info (sample);
1577 
1578   if (!buf || !structure)
1579     return;
1580 
1581   filename = gst_structure_get_string (structure, "filename");
1582   if (!filename)
1583     return;
1584 
1585   valid_mimetype = FALSE;
1586   valid_extension = FALSE;
1587 
1588   mimetype = gst_structure_get_string (structure, "mimetype");
1589   if (mimetype) {
1590     for (i = 0; i < G_N_ELEMENTS (mimetypes); i++) {
1591       if (strcmp (mimetype, mimetypes[i]) == 0) {
1592         valid_mimetype = TRUE;
1593         break;
1594       }
1595     }
1596   }
1597 
1598   if (!valid_mimetype) {
1599     guint len = strlen (filename);
1600     const gchar *extension = filename + len - 4;
1601     for (i = 0; i < G_N_ELEMENTS (extensions); i++) {
1602       if (g_ascii_strcasecmp (extension, extensions[i]) == 0) {
1603         valid_extension = TRUE;
1604         break;
1605       }
1606     }
1607   }
1608 
1609   if (valid_mimetype || valid_extension) {
1610     GstMapInfo map;
1611 
1612     g_mutex_lock (&render->ass_mutex);
1613     gst_buffer_map (buf, &map, GST_MAP_READ);
1614     ass_add_font (render->ass_library, (gchar *) filename,
1615         (gchar *) map.data, map.size);
1616     gst_buffer_unmap (buf, &map);
1617     GST_DEBUG_OBJECT (render, "registered new font %s", filename);
1618     g_mutex_unlock (&render->ass_mutex);
1619   }
1620 }
1621 
1622 static void
gst_ass_render_handle_tags(GstAssRender * render,GstTagList * taglist)1623 gst_ass_render_handle_tags (GstAssRender * render, GstTagList * taglist)
1624 {
1625   guint tag_size;
1626 
1627   if (!taglist)
1628     return;
1629 
1630   tag_size = gst_tag_list_get_tag_size (taglist, GST_TAG_ATTACHMENT);
1631   if (tag_size > 0 && render->embeddedfonts) {
1632     guint index;
1633     GstSample *sample;
1634 
1635     GST_DEBUG_OBJECT (render, "TAG event has attachments");
1636 
1637     for (index = 0; index < tag_size; index++) {
1638       if (gst_tag_list_get_sample_index (taglist, GST_TAG_ATTACHMENT, index,
1639               &sample)) {
1640         gst_ass_render_handle_tag_sample (render, sample);
1641         gst_sample_unref (sample);
1642       }
1643     }
1644   }
1645 }
1646 
/* Event handler for the video sink pad.
 *
 * Handles CAPS (reconfigure the renderer), SEGMENT (track the video
 * segment; TIME format only), TAG (extract embedded fonts), EOS and
 * FLUSH_START/STOP (update flush/EOS flags and wake the video chain);
 * everything else is forwarded with the default handler. */
static gboolean
gst_ass_render_event_video (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret = FALSE;
  GstAssRender *render = GST_ASS_RENDER (parent);

  GST_DEBUG_OBJECT (pad, "received video event %" GST_PTR_FORMAT, event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      /* consumed here: setcaps drives our own (re)negotiation */
      gst_event_parse_caps (event, &caps);
      ret = gst_ass_render_setcaps_video (pad, render, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_DEBUG_OBJECT (render, "received new segment");

      gst_event_copy_segment (event, &segment);

      if (segment.format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (render, "VIDEO SEGMENT now: %" GST_SEGMENT_FORMAT,
            &render->video_segment);

        render->video_segment = segment;

        GST_DEBUG_OBJECT (render, "VIDEO SEGMENT after: %" GST_SEGMENT_FORMAT,
            &render->video_segment);
        ret = gst_pad_event_default (pad, parent, event);
      } else {
        /* non-TIME segments can't be related to subtitle timestamps */
        GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
            ("received non-TIME newsegment event on video input"));
        ret = FALSE;
        gst_event_unref (event);
      }
      break;
    }
    case GST_EVENT_TAG:
    {
      GstTagList *taglist = NULL;

      /* tag events may contain attachments which might be fonts */
      GST_DEBUG_OBJECT (render, "got TAG event");

      gst_event_parse_tag (event, &taglist);
      gst_ass_render_handle_tags (render, taglist);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    case GST_EVENT_EOS:
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "video EOS");
      render->video_eos = TRUE;
      GST_ASS_RENDER_UNLOCK (render);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_FLUSH_START:
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "video flush start");
      render->video_flushing = TRUE;
      /* wake the chain function if it is waiting for text */
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "video flush stop");
      render->video_flushing = FALSE;
      render->video_eos = FALSE;
      gst_segment_init (&render->video_segment, GST_FORMAT_TIME);
      GST_ASS_RENDER_UNLOCK (render);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
1733 
1734 static gboolean
gst_ass_render_query_video(GstPad * pad,GstObject * parent,GstQuery * query)1735 gst_ass_render_query_video (GstPad * pad, GstObject * parent, GstQuery * query)
1736 {
1737   gboolean res = FALSE;
1738 
1739   switch (GST_QUERY_TYPE (query)) {
1740     case GST_QUERY_CAPS:
1741     {
1742       GstCaps *filter, *caps;
1743 
1744       gst_query_parse_caps (query, &filter);
1745       caps =
1746           gst_ass_render_get_videosink_caps (pad, (GstAssRender *) parent,
1747           filter);
1748       gst_query_set_caps_result (query, caps);
1749       gst_caps_unref (caps);
1750       res = TRUE;
1751       break;
1752     }
1753     default:
1754       res = gst_pad_query_default (pad, parent, query);
1755       break;
1756   }
1757 
1758   return res;
1759 }
1760 
/* Sink event handler for the text (subtitle) pad.
 *
 * Subtitle state (segment, flushing and EOS flags) is protected by the
 * render lock; whenever that state changes, GST_ASS_RENDER_BROADCAST wakes
 * the video chain, which may be blocked waiting for a text buffer or a
 * text segment update.  Events fully handled here are unreffed; TAG and
 * unrecognized events are passed on with gst_pad_event_default().
 */
static gboolean
gst_ass_render_event_text (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean ret = FALSE;
  GstAssRender *render = GST_ASS_RENDER (parent);

  GST_DEBUG_OBJECT (pad, "received text event %" GST_PTR_FORMAT, event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      /* Reconfigure the subtitle input; the event is consumed here and not
       * forwarded downstream. */
      gst_event_parse_caps (event, &caps);
      ret = gst_ass_render_setcaps_text (pad, render, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      /* A new segment means the text pad is no longer at EOS. */
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_eos = FALSE;
      GST_ASS_RENDER_UNLOCK (render);

      gst_event_copy_segment (event, &segment);

      GST_ASS_RENDER_LOCK (render);
      if (segment.format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (render, "TEXT SEGMENT now: %" GST_SEGMENT_FORMAT,
            &render->subtitle_segment);

        render->subtitle_segment = segment;

        GST_DEBUG_OBJECT (render,
            "TEXT SEGMENT after: %" GST_SEGMENT_FORMAT,
            &render->subtitle_segment);
      } else {
        /* Non-TIME segments are rejected: the previous subtitle segment is
         * kept and only a warning is emitted. */
        GST_ELEMENT_WARNING (render, STREAM, MUX, (NULL),
            ("received non-TIME newsegment event on subtitle input"));
      }

      gst_event_unref (event);
      ret = TRUE;

      /* wake up the video chain, it might be waiting for a text buffer or
       * a text segment update */
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      break;
    }
    case GST_EVENT_GAP:{
      GstClockTime start, duration;

      /* Advance the subtitle position to the end of the gap (start +
       * duration when the duration is valid, otherwise just start). */
      gst_event_parse_gap (event, &start, &duration);
      if (GST_CLOCK_TIME_IS_VALID (duration))
        start += duration;
      /* we do not expect another buffer until after gap,
       * so that is our position now */
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_segment.position = start;

      /* wake up the video chain, it might be waiting for a text buffer or
       * a text segment update */
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);

      gst_event_unref (event);
      ret = TRUE;
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      /* Drop all events queued in the libass track; the track is guarded
       * by ass_mutex, which is taken and released before the render lock. */
      g_mutex_lock (&render->ass_mutex);
      if (render->ass_track) {
        ass_flush_events (render->ass_track);
      }
      g_mutex_unlock (&render->ass_mutex);
      GST_ASS_RENDER_LOCK (render);
      GST_INFO_OBJECT (render, "text flush stop");
      render->subtitle_flushing = FALSE;
      render->subtitle_eos = FALSE;
      /* Drop any pending subtitle buffer and reset the segment. */
      gst_ass_render_pop_text (render);
      gst_segment_init (&render->subtitle_segment, GST_FORMAT_TIME);
      GST_ASS_RENDER_UNLOCK (render);
      gst_event_unref (event);
      ret = TRUE;
      break;
    case GST_EVENT_FLUSH_START:
      GST_DEBUG_OBJECT (render, "text flush start");
      GST_ASS_RENDER_LOCK (render);
      /* Mark flushing and wake the video chain so it can stop waiting for
       * subtitle data. */
      render->subtitle_flushing = TRUE;
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      gst_event_unref (event);
      ret = TRUE;
      break;
    case GST_EVENT_EOS:
      GST_ASS_RENDER_LOCK (render);
      render->subtitle_eos = TRUE;
      GST_INFO_OBJECT (render, "text EOS");
      /* wake up the video chain, it might be waiting for a text buffer or
       * a text segment update */
      GST_ASS_RENDER_BROADCAST (render);
      GST_ASS_RENDER_UNLOCK (render);
      gst_event_unref (event);
      ret = TRUE;
      break;
    case GST_EVENT_TAG:
    {
      GstTagList *taglist = NULL;

      /* tag events may contain attachments which might be fonts */
      GST_DEBUG_OBJECT (render, "got TAG event");

      gst_event_parse_tag (event, &taglist);
      gst_ass_render_handle_tags (render, taglist);
      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
1888 
1889 static gboolean
plugin_init(GstPlugin * plugin)1890 plugin_init (GstPlugin * plugin)
1891 {
1892   return GST_ELEMENT_REGISTER (assrender, plugin);
1893 }
1894 
/* Plugin descriptor: exports the "assrender" plugin (ASS/SSA subtitle
 * renderer, LGPL) to the GStreamer registry via plugin_init above. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    assrender,
    "ASS/SSA subtitle renderer",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
1900