/* GStreamer
 * Copyright (C) <2020> Jan Schmidt <jan@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstdvbsubenc.h"
#include <string.h>

/**
 * SECTION:element-dvbsubenc
 * @title: dvbsubenc
 * @see_also: dvbsuboverlay
 *
 * This element encodes AYUV video frames to DVB subpictures.
 *
 * ## Example pipelines
 * |[
 * gst-launch-1.0 videotestsrc num-buffers=900 ! video/x-raw,width=720,height=576,framerate=30/1 ! x264enc bitrate=500 ! h264parse ! mpegtsmux name=mux ! filesink location=test.ts filesrc location=test-subtitles.srt ! subparse ! textrender ! dvbsubenc ! mux.
 * ]|
 * Encode a test video signal and an SRT subtitle file to MPEG-TS with a DVB subpicture track.
 *
 */

#define DEFAULT_MAX_COLOURS 16
#define DEFAULT_TS_OFFSET 0

enum
{
  PROP_0,
  PROP_MAX_COLOURS,
  PROP_TS_OFFSET
};

#define gst_dvb_sub_enc_parent_class parent_class
G_DEFINE_TYPE (GstDvbSubEnc, gst_dvb_sub_enc, GST_TYPE_ELEMENT);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dvbsubenc, "dvbsubenc", GST_RANK_NONE,
    GST_TYPE_DVB_SUB_ENC, GST_DEBUG_CATEGORY_INIT (gst_dvb_sub_enc_debug,
        "dvbsubenc", 0, "DVB subtitle encoder");
    );

static void gst_dvb_sub_enc_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_dvb_sub_enc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);

static gboolean gst_dvb_sub_enc_src_event (GstPad * srcpad, GstObject * parent,
    GstEvent * event);
static GstFlowReturn gst_dvb_sub_enc_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buf);

static void gst_dvb_sub_enc_finalize (GObject * gobject);
static gboolean gst_dvb_sub_enc_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static gboolean gst_dvb_sub_enc_sink_setcaps (GstPad * pad, GstCaps * caps);

static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, format = (string) { AYUV }")
    );

static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("subpicture/x-dvb")
    );

GST_DEBUG_CATEGORY (gst_dvb_sub_enc_debug);

static void
gst_dvb_sub_enc_class_init (GstDvbSubEncClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;

  gobject_class->finalize = gst_dvb_sub_enc_finalize;

  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
  gst_element_class_add_static_pad_template (gstelement_class, &src_template);

  gst_element_class_set_static_metadata (gstelement_class,
      "DVB subtitle encoder", "Codec/Encoder/Video",
      "Encodes AYUV video frame streams into DVB subtitles",
      "Jan Schmidt <jan@centricular.com>");

  gobject_class->set_property = gst_dvb_sub_enc_set_property;
  gobject_class->get_property = gst_dvb_sub_enc_get_property;

  /**
   * GstDvbSubEnc:max-colours:
   *
   * Set the maximum number of colours to output into the DVB subpictures.
   * Good choices are 4, 16 or 256 - as they correspond to the 2-bit, 4-bit
   * and 8-bit palette modes that the DVB subpicture encoding supports.
   */
  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MAX_COLOURS,
      g_param_spec_int ("max-colours", "Maximum Colours",
          "Maximum Number of Colours to output", 1, 256, DEFAULT_MAX_COLOURS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstDvbSubEnc:ts-offset:
   *
   * Advance or delay the output subpicture time-line. This is a
   * convenience property for setting the src pad offset.
   */
  g_object_class_install_property (gobject_class, PROP_TS_OFFSET,
      g_param_spec_int64 ("ts-offset", "Subtitle Timestamp Offset",
          "Apply an offset to incoming timestamps before output (in nanoseconds)",
          G_MININT64, G_MAXINT64, 0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

}

static void
gst_dvb_sub_enc_init (GstDvbSubEnc * enc)
{
  GstPadTemplate *tmpl;

  enc->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_chain_function (enc->sinkpad,
      GST_DEBUG_FUNCPTR (gst_dvb_sub_enc_chain));
  gst_pad_set_event_function (enc->sinkpad,
      GST_DEBUG_FUNCPTR (gst_dvb_sub_enc_sink_event));
  gst_element_add_pad (GST_ELEMENT (enc), enc->sinkpad);

  tmpl = gst_static_pad_template_get (&src_template);
  enc->srcpad = gst_pad_new_from_template (tmpl, "src");
  gst_pad_set_event_function (enc->srcpad,
      GST_DEBUG_FUNCPTR (gst_dvb_sub_enc_src_event));
  gst_pad_use_fixed_caps (enc->srcpad);
  gst_object_unref (tmpl);
  gst_element_add_pad (GST_ELEMENT (enc), enc->srcpad);

  enc->max_colours = DEFAULT_MAX_COLOURS;
  enc->ts_offset = DEFAULT_TS_OFFSET;

  enc->current_end_time = GST_CLOCK_TIME_NONE;
}

static void
gst_dvb_sub_enc_finalize (GObject * gobject)
{
  //GstDvbSubEnc *enc = GST_DVB_SUB_ENC (gobject);

  G_OBJECT_CLASS (parent_class)->finalize (gobject);
}

static void
gst_dvb_sub_enc_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstDvbSubEnc *enc = GST_DVB_SUB_ENC (object);

  switch (prop_id) {
    case PROP_MAX_COLOURS:
      g_value_set_int (value, enc->max_colours);
      break;
    case PROP_TS_OFFSET:
      g_value_set_int64 (value, enc->ts_offset);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_dvb_sub_enc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDvbSubEnc *enc = GST_DVB_SUB_ENC (object);

  switch (prop_id) {
    case PROP_MAX_COLOURS:
      enc->max_colours = g_value_get_int (value);
      break;
    case PROP_TS_OFFSET:
      enc->ts_offset = g_value_get_int64 (value);
      gst_pad_set_offset (enc->srcpad, enc->ts_offset);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static gboolean
gst_dvb_sub_enc_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean res = FALSE;

  switch (GST_EVENT_TYPE (event)) {
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

  return res;
}

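/* Find the bounding box of all non-transparent pixels in an AYUV frame.
 * Each row is scanned from the left and right edges simultaneously, checking
 * the alpha byte of each pixel: rows containing visible pixels update the
 * top/bottom bounds, and the outermost visible columns update left/right. */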
static void
find_largest_subregion (guint8 * pixels, guint stride, guint pixel_stride,
    gint width, gint height, guint * out_left, guint * out_right,
    guint * out_top, guint * out_bottom)
{
  guint left = width, right = 0, top = height, bottom = 0;
  gint y, x;
  guint8 *p = pixels;

  for (y = 0; y < height; y++) {
    gboolean visible_pixels = FALSE;
    guint8 *l = p;
    guint8 *r = p + (width - 1) * pixel_stride;

    for (x = 0; x < width; x++) {
      /* AYUV data = byte 0 = A */
      if (l[0] != 0) {
        visible_pixels = TRUE;
        left = MIN (left, x);
      }
      if (r[0] != 0) {
        visible_pixels = TRUE;
        right = MAX (right, width - 1 - x);
      }

      l += pixel_stride;
      r -= pixel_stride;

      if (l >= r)               /* Stop when we've scanned to the middle */
        break;
    }

    if (visible_pixels) {
      if (top > y)
        top = y;
      if (bottom < y)
        bottom = y;
    }
    p += stride;
  }

  *out_left = left;
  *out_right = right;
  *out_top = top;
  *out_bottom = bottom;
}

/* Create and map a new buffer containing the indicated subregion of the input
 * image, returning the result in the 'out' GstVideoFrame */
static gboolean
create_cropped_frame (GstDvbSubEnc * enc, GstVideoFrame * in,
    GstVideoFrame * out, guint x, guint y, guint width, guint height)
{
  GstBuffer *cropped_buffer;
  GstVideoInfo cropped_info;
  guint8 *out_pixels, *in_pixels;
  guint out_stride, in_stride, p_stride;
  guint bottom = y + height;

  g_return_val_if_fail (GST_VIDEO_INFO_FORMAT (&in->info) ==
      GST_VIDEO_FORMAT_AYUV, FALSE);

  gst_video_info_set_format (&cropped_info, GST_VIDEO_INFO_FORMAT (&in->info),
      width, height);
  cropped_buffer =
      gst_buffer_new_allocate (NULL, GST_VIDEO_INFO_SIZE (&cropped_info), NULL);

  if (!gst_video_frame_map (out, &cropped_info, cropped_buffer, GST_MAP_WRITE)) {
    gst_buffer_unref (cropped_buffer);
    return FALSE;
  }

  p_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (in, 0);
  in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in, 0);
  in_pixels = GST_VIDEO_FRAME_PLANE_DATA (in, 0);

  out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out, 0);
  out_pixels = GST_VIDEO_FRAME_PLANE_DATA (out, 0);

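  /* Skip to the first pixel of the requested sub-rectangle in the source,
   * then copy it row by row into the tightly packed cropped frame */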
  in_pixels += y * in_stride + x * p_stride;

  while (y < bottom) {
    memcpy (out_pixels, in_pixels, width * p_stride);

    in_pixels += in_stride;
    out_pixels += out_stride;
    y++;
  }

  /* Re-map the frame with the NO_REF flag so that it takes ownership of the
   * buffer, which will then be released when the caller unmaps the frame
   * (if this map call succeeds) */
  gst_video_frame_unmap (out);
  if (!gst_video_frame_map (out, &cropped_info, cropped_buffer,
          GST_MAP_READ | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
    gst_buffer_unref (cropped_buffer);
    return FALSE;
  }
  return TRUE;
}

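/* Encode the largest visible region of the input frame as a DVB subpicture:
 * find the bounding box of non-transparent pixels, crop the frame to it,
 * quantise the result to an 8-bit paletted image with at most max_colours
 * entries, encode and push a subtitle packet, and record the buffer's end
 * time so an end-of-page packet can be emitted later. */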
static GstFlowReturn
process_largest_subregion (GstDvbSubEnc * enc, GstVideoFrame * vframe)
{
  GstFlowReturn ret = GST_FLOW_ERROR;

  guint8 *pixels = GST_VIDEO_FRAME_PLANE_DATA (vframe, 0);
  guint stride = GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 0);
  guint pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (vframe, 0);
  guint left, right, top, bottom;
  GstBuffer *ayuv8p_buffer;
  GstVideoInfo ayuv8p_info;
  GstVideoFrame cropped_frame, ayuv8p_frame;
  guint32 num_colours;
  GstClockTime end_ts = GST_CLOCK_TIME_NONE, duration;

  find_largest_subregion (pixels, stride, pixel_stride, enc->in_info.width,
      enc->in_info.height, &left, &right, &top, &bottom);

  GST_LOG_OBJECT (enc, "Found subregion %u,%u -> %u,%u w %u, %u", left, top,
      right, bottom, right - left + 1, bottom - top + 1);

  if (!create_cropped_frame (enc, vframe, &cropped_frame, left, top,
          right - left + 1, bottom - top + 1)) {
    GST_WARNING_OBJECT (enc, "Failed to map frame conversion input buffer");
    goto fail;
  }

  /* FIXME: RGB8P is the same size as what we're building, so this is fine,
   * but it'd be better if we had an explicit paletted format for YUV8P */
  gst_video_info_set_format (&ayuv8p_info, GST_VIDEO_FORMAT_RGB8P,
      right - left + 1, bottom - top + 1);
  ayuv8p_buffer =
      gst_buffer_new_allocate (NULL, GST_VIDEO_INFO_SIZE (&ayuv8p_info), NULL);

  /* Mapped without extra ref - the frame now owns the only ref */
  if (!gst_video_frame_map (&ayuv8p_frame, &ayuv8p_info, ayuv8p_buffer,
          GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
    GST_WARNING_OBJECT (enc, "Failed to map frame conversion output buffer");
    gst_video_frame_unmap (&cropped_frame);
    gst_buffer_unref (ayuv8p_buffer);
    goto fail;
  }

  if (!gst_dvbsubenc_ayuv_to_ayuv8p (&cropped_frame, &ayuv8p_frame,
          enc->max_colours, &num_colours)) {
    GST_ERROR_OBJECT (enc,
        "Failed to convert subpicture region to paletted 8-bit");
    gst_video_frame_unmap (&cropped_frame);
    gst_video_frame_unmap (&ayuv8p_frame);
    goto skip;
  }

  gst_video_frame_unmap (&cropped_frame);

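  /* Work out when this subtitle should stop being displayed, so a clearing
   * end-of-page packet can be scheduled for that time */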
  duration = GST_BUFFER_DURATION (vframe->buffer);

  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    end_ts = GST_BUFFER_PTS (vframe->buffer);
    if (GST_CLOCK_TIME_IS_VALID (end_ts)) {
      end_ts += duration;
    }
  }

  /* Encode output buffer and push it */
  {
    SubpictureRect s;
    GstBuffer *packet;

    s.frame = &ayuv8p_frame;
    s.nb_colours = num_colours;
    s.x = left;
    s.y = top;

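    /* The version_number field in DVB subtitle segments is only 4 bits wide,
     * so the running object_version counter is masked to 4 bits */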
    packet = gst_dvbenc_encode (enc->object_version & 0xF, 1, &s, 1);
    if (packet == NULL) {
      gst_video_frame_unmap (&ayuv8p_frame);
      goto fail;
    }

    enc->object_version++;

    gst_buffer_copy_into (packet, vframe->buffer, GST_BUFFER_COPY_METADATA, 0,
        -1);

    if (!GST_BUFFER_DTS_IS_VALID (packet))
      GST_BUFFER_DTS (packet) = GST_BUFFER_PTS (packet);

    ret = gst_pad_push (enc->srcpad, packet);
  }

  if (GST_CLOCK_TIME_IS_VALID (end_ts)) {
    GST_LOG_OBJECT (enc, "Scheduling subtitle end packet for %" GST_TIME_FORMAT,
        GST_TIME_ARGS (end_ts));
    enc->current_end_time = end_ts;
  }

  gst_video_frame_unmap (&ayuv8p_frame);

  return ret;
skip:
  return GST_FLOW_OK;
fail:
  return GST_FLOW_ERROR;
}

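/* DVB subtitles stay on screen until explicitly cleared. Once the stream time
 * passes the stored end time of the current subtitle, push an empty page
 * (containing no regions) to clear the display. */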
static GstFlowReturn
gst_dvb_sub_enc_generate_end_packet (GstDvbSubEnc * enc, GstClockTime pts)
{
  GstBuffer *packet;
  GstFlowReturn ret;

  if (!GST_CLOCK_TIME_IS_VALID (enc->current_end_time))
    return GST_FLOW_OK;

  if (enc->current_end_time >= pts)
    return GST_FLOW_OK;         /* Didn't hit the end of the current subtitle yet */

  GST_DEBUG_OBJECT (enc, "Outputting end of page at TS %" GST_TIME_FORMAT,
      GST_TIME_ARGS (enc->current_end_time));

  packet = gst_dvbenc_encode (enc->object_version & 0xF, 1, NULL, 0);
  if (packet == NULL) {
    GST_ELEMENT_ERROR (enc, STREAM, FAILED,
        ("Internal data stream error."),
        ("Failed to encode end of subtitle packet"));
    return GST_FLOW_ERROR;
  }

  enc->object_version++;

  GST_BUFFER_DTS (packet) = GST_BUFFER_PTS (packet) = enc->current_end_time;
  enc->current_end_time = GST_CLOCK_TIME_NONE;

  ret = gst_pad_push (enc->srcpad, packet);

  return ret;
}

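/* Sink pad chain function: for each incoming AYUV frame, first emit any
 * pending end-of-page packet whose end time has been reached, then map the
 * frame and encode its largest visible subregion as a DVB subpicture. */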
static GstFlowReturn
gst_dvb_sub_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstDvbSubEnc *enc = GST_DVB_SUB_ENC (parent);
  GstVideoFrame vframe;
  GstClockTime pts = GST_BUFFER_PTS (buf);

  GST_DEBUG_OBJECT (enc, "Have buffer of size %" G_GSIZE_FORMAT ", ts %"
      GST_TIME_FORMAT ", dur %" G_GINT64_FORMAT, gst_buffer_get_size (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), GST_BUFFER_DURATION (buf));

  if (GST_CLOCK_TIME_IS_VALID (pts)) {
    ret = gst_dvb_sub_enc_generate_end_packet (enc, pts);
    if (ret != GST_FLOW_OK)
      goto fail;
  }

  /* FIXME: Allow GstVideoOverlayComposition input, so we can directly encode the
   * overlays passed */

  /* Scan the input buffer for regions to encode */
  /* FIXME: Could use the blob extents tracking code from OpenHMD here to collect
   * multiple regions */
  if (!gst_video_frame_map (&vframe, &enc->in_info, buf, GST_MAP_READ)) {
    GST_ERROR_OBJECT (enc, "Failed to map input buffer for reading");
    ret = GST_FLOW_ERROR;
    goto fail;
  }

  ret = process_largest_subregion (enc, &vframe);
  gst_video_frame_unmap (&vframe);

fail:
  gst_buffer_unref (buf);
  return ret;
}

static gboolean
gst_dvb_sub_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDvbSubEnc *enc = GST_DVB_SUB_ENC (gst_pad_get_parent (pad));
  gboolean ret = FALSE;
  GstCaps *out_caps = NULL;

  GST_DEBUG_OBJECT (enc, "setcaps called with %" GST_PTR_FORMAT, caps);
  if (!gst_video_info_from_caps (&enc->in_info, caps)) {
    GST_ERROR_OBJECT (enc, "Failed to parse input caps");
    goto beach;
  }

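  /* The output subpicture caps carry the geometry and frame rate of the
   * incoming video, so downstream elements know the intended display size */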
  out_caps = gst_caps_new_simple ("subpicture/x-dvb",
      "width", G_TYPE_INT, enc->in_info.width,
      "height", G_TYPE_INT, enc->in_info.height,
      "framerate", GST_TYPE_FRACTION, enc->in_info.fps_n, enc->in_info.fps_d,
      NULL);

  if (!gst_pad_set_caps (enc->srcpad, out_caps)) {
    GST_WARNING_OBJECT (enc, "failed setting downstream caps");
    gst_caps_unref (out_caps);
    goto beach;
  }

  gst_caps_unref (out_caps);
  ret = TRUE;

beach:
  gst_object_unref (enc);
  return ret;
}

static gboolean
gst_dvb_sub_enc_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstDvbSubEnc *enc = GST_DVB_SUB_ENC (parent);
  gboolean ret = FALSE;

  GST_LOG_OBJECT (enc, "%s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_dvb_sub_enc_sink_setcaps (pad, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_GAP:
    {
      GstClockTime start, duration;

      gst_event_parse_gap (event, &start, &duration);
      if (GST_CLOCK_TIME_IS_VALID (start)) {
        if (GST_CLOCK_TIME_IS_VALID (duration))
          start += duration;
        /* we do not expect another buffer until after gap,
         * so that is our position now */
        GST_DEBUG_OBJECT (enc,
            "Got GAP event, advancing time to %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start));
        gst_dvb_sub_enc_generate_end_packet (enc, start);
      } else {
        GST_WARNING_OBJECT (enc, "Got GAP event with invalid position");
      }

      gst_event_unref (event);
      ret = TRUE;
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment seg;

      gst_event_copy_segment (event, &seg);

      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    case GST_EVENT_FLUSH_STOP:{
      enc->current_end_time = GST_CLOCK_TIME_NONE;

      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    default:{
      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
  }
  return ret;
}

static gboolean
plugin_init (GstPlugin * plugin)
{
  return GST_ELEMENT_REGISTER (dvbsubenc, plugin);
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    dvbsubenc,
    "DVB subtitle parser and encoder", plugin_init,
    VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);