1 /* GStreamer
2 * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
18 */
19
20 #ifdef HAVE_CONFIG_H
21 # include "config.h"
22 #endif
23
24 #include <gst/rtp/gstrtpbuffer.h>
25 #include <gst/video/video.h>
26
27 #include <string.h>
28 #include <stdlib.h>
29 #include "gstrtpelements.h"
30 #include "gstrtpvrawdepay.h"
31 #include "gstrtputils.h"
32
GST_DEBUG_CATEGORY_STATIC (rtpvrawdepay_debug);
#define GST_CAT_DEFAULT (rtpvrawdepay_debug)

/* Source pad: pushes decoded raw video frames downstream. */
static GstStaticPadTemplate gst_rtp_vraw_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw")
    );

/* Sink pad: accepts RFC 4175 raw video RTP packets.  The sampling values
 * match the format names defined in RFC 4175 section 6.1. */
static GstStaticPadTemplate gst_rtp_vraw_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "clock-rate = (int) 90000, "
        "encoding-name = (string) \"RAW\", "
        "sampling = (string) { \"RGB\", \"RGBA\", \"BGR\", \"BGRA\", "
        "\"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", "
        "\"YCbCr-4:1:1\" },"
        /* we cannot express these as strings
         * "width = (string) [1 32767],"
         * "height = (string) [1 32767],"
         */
        "depth = (string) { \"8\", \"10\", \"12\", \"16\" }")
    );

#define gst_rtp_vraw_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvrawdepay, "rtpvrawdepay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY, rtp_element_init (plugin));

/* Forward declarations for the vfuncs installed in class_init below. */
static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload,
    GstCaps * caps);
static GstBuffer *gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload *
    depay, GstRTPBuffer * rtp);

static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
    element, GstStateChange transition);

static gboolean gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter,
    GstEvent * event);
77
78 static void
gst_rtp_vraw_depay_class_init(GstRtpVRawDepayClass * klass)79 gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
80 {
81 GstElementClass *gstelement_class;
82 GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
83
84 gstelement_class = (GstElementClass *) klass;
85 gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
86
87 gstelement_class->change_state = gst_rtp_vraw_depay_change_state;
88
89 gstrtpbasedepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
90 gstrtpbasedepayload_class->process_rtp_packet =
91 gst_rtp_vraw_depay_process_packet;
92 gstrtpbasedepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
93
94 gst_element_class_add_static_pad_template (gstelement_class,
95 &gst_rtp_vraw_depay_src_template);
96 gst_element_class_add_static_pad_template (gstelement_class,
97 &gst_rtp_vraw_depay_sink_template);
98
99 gst_element_class_set_static_metadata (gstelement_class,
100 "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
101 "Extracts raw video from RTP packets (RFC 4175)",
102 "Wim Taymans <wim.taymans@gmail.com>");
103
104 GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
105 "raw video RTP Depayloader");
106 }
107
static void
gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
{
  /* Nothing to do here: instance memory is zero-initialized by GObject and
   * the real per-stream setup happens in setcaps() / change_state(). */
}
112
113 static void
gst_rtp_vraw_depay_reset(GstRtpVRawDepay * rtpvrawdepay,gboolean full)114 gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay, gboolean full)
115 {
116 if (rtpvrawdepay->outbuf) {
117 gst_video_frame_unmap (&rtpvrawdepay->frame);
118 gst_buffer_unref (rtpvrawdepay->outbuf);
119 rtpvrawdepay->outbuf = NULL;
120 }
121 rtpvrawdepay->timestamp = -1;
122
123 if (full && rtpvrawdepay->pool) {
124 gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
125 gst_object_unref (rtpvrawdepay->pool);
126 rtpvrawdepay->pool = NULL;
127 }
128 }
129
130 static GstFlowReturn
gst_rtp_vraw_depay_negotiate_pool(GstRtpVRawDepay * depay,GstCaps * caps,GstVideoInfo * info)131 gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
132 GstVideoInfo * info)
133 {
134 GstQuery *query;
135 GstBufferPool *pool = NULL;
136 guint size, min, max;
137 GstStructure *config;
138
139 /* find a pool for the negotiated caps now */
140 query = gst_query_new_allocation (caps, TRUE);
141
142 if (!gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
143 /* not a problem, we use the defaults of query */
144 GST_DEBUG_OBJECT (depay, "could not get downstream ALLOCATION hints");
145 }
146
147 if (gst_query_get_n_allocation_pools (query) > 0) {
148 /* we got configuration from our peer, parse them */
149 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
150 } else {
151 GST_DEBUG_OBJECT (depay, "didn't get downstream pool hints");
152 size = info->size;
153 min = max = 0;
154 }
155
156 if (pool == NULL) {
157 /* we did not get a pool, make one ourselves then */
158 pool = gst_video_buffer_pool_new ();
159 }
160
161 if (depay->pool)
162 gst_object_unref (depay->pool);
163 depay->pool = pool;
164
165 config = gst_buffer_pool_get_config (pool);
166 gst_buffer_pool_config_set_params (config, caps, size, min, max);
167 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
168 /* just set the metadata, if the pool can support it we will transparently use
169 * it through the video info API. We could also see if the pool support this
170 * metadata and only activate it then. */
171 gst_buffer_pool_config_add_option (config,
172 GST_BUFFER_POOL_OPTION_VIDEO_META);
173 }
174
175 gst_buffer_pool_set_config (pool, config);
176 /* and activate */
177 gst_buffer_pool_set_active (pool, TRUE);
178
179 gst_query_unref (query);
180
181 return GST_FLOW_OK;
182 }
183
184 static gboolean
gst_rtp_vraw_depay_setcaps(GstRTPBaseDepayload * depayload,GstCaps * caps)185 gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
186 {
187 GstStructure *structure;
188 GstRtpVRawDepay *rtpvrawdepay;
189 gint clock_rate;
190 const gchar *str;
191 gint format, width, height, depth, pgroup, xinc, yinc;
192 GstCaps *srccaps;
193 gboolean res;
194 GstFlowReturn ret;
195
196 rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
197
198 structure = gst_caps_get_structure (caps, 0);
199
200 xinc = yinc = 1;
201
202 if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
203 clock_rate = 90000; /* default */
204 depayload->clock_rate = clock_rate;
205
206 if (!(str = gst_structure_get_string (structure, "width")))
207 goto no_width;
208 width = atoi (str);
209
210 if (!(str = gst_structure_get_string (structure, "height")))
211 goto no_height;
212 height = atoi (str);
213
214 if (!(str = gst_structure_get_string (structure, "depth")))
215 goto no_depth;
216 depth = atoi (str);
217
218 /* optional interlace value but we don't handle interlaced
219 * formats yet */
220 if (gst_structure_get_string (structure, "interlace"))
221 goto interlaced;
222
223 if (!(str = gst_structure_get_string (structure, "sampling")))
224 goto no_sampling;
225
226 if (!strcmp (str, "RGB")) {
227 format = GST_VIDEO_FORMAT_RGB;
228 pgroup = 3;
229 } else if (!strcmp (str, "RGBA")) {
230 format = GST_VIDEO_FORMAT_RGBA;
231 pgroup = 4;
232 } else if (!strcmp (str, "BGR")) {
233 format = GST_VIDEO_FORMAT_BGR;
234 pgroup = 3;
235 } else if (!strcmp (str, "BGRA")) {
236 format = GST_VIDEO_FORMAT_BGRA;
237 pgroup = 4;
238 } else if (!strcmp (str, "YCbCr-4:4:4")) {
239 format = GST_VIDEO_FORMAT_AYUV;
240 pgroup = 3;
241 } else if (!strcmp (str, "YCbCr-4:2:2")) {
242 if (depth == 8) {
243 format = GST_VIDEO_FORMAT_UYVY;
244 pgroup = 4;
245 } else if (depth == 10) {
246 format = GST_VIDEO_FORMAT_UYVP;
247 pgroup = 5;
248 } else
249 goto unknown_format;
250 xinc = 2;
251 } else if (!strcmp (str, "YCbCr-4:2:0")) {
252 format = GST_VIDEO_FORMAT_I420;
253 pgroup = 6;
254 xinc = yinc = 2;
255 } else if (!strcmp (str, "YCbCr-4:1:1")) {
256 format = GST_VIDEO_FORMAT_Y41B;
257 pgroup = 6;
258 xinc = 4;
259 } else {
260 goto unknown_format;
261 }
262
263 gst_video_info_init (&rtpvrawdepay->vinfo);
264 gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
265 GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
266 GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
267
268 rtpvrawdepay->pgroup = pgroup;
269 rtpvrawdepay->xinc = xinc;
270 rtpvrawdepay->yinc = yinc;
271
272 srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
273 res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
274 gst_caps_unref (srccaps);
275
276 GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
277 format);
278 GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
279 xinc, yinc, pgroup);
280
281 /* negotiate a bufferpool */
282 if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, srccaps,
283 &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
284 goto no_bufferpool;
285
286 return res;
287
288 /* ERRORS */
289 no_width:
290 {
291 GST_ERROR_OBJECT (depayload, "no width specified");
292 return FALSE;
293 }
294 no_height:
295 {
296 GST_ERROR_OBJECT (depayload, "no height specified");
297 return FALSE;
298 }
299 no_depth:
300 {
301 GST_ERROR_OBJECT (depayload, "no depth specified");
302 return FALSE;
303 }
304 interlaced:
305 {
306 GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
307 return FALSE;
308 }
309 no_sampling:
310 {
311 GST_ERROR_OBJECT (depayload, "no sampling specified");
312 return FALSE;
313 }
314 unknown_format:
315 {
316 GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
317 return FALSE;
318 }
319 no_bufferpool:
320 {
321 GST_DEBUG_OBJECT (depayload, "no bufferpool");
322 return FALSE;
323 }
324 }
325
/* Depayload one RFC 4175 RTP packet.
 *
 * Each packet carries one or more (length, line, offset) headers followed by
 * the corresponding scan-line data.  The data is copied into an output frame
 * mapped from a pooled buffer; when the RTP marker bit is set the completed
 * frame buffer is returned, otherwise NULL (frame still being assembled, or
 * an error occurred). */
static GstBuffer *
gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload * depayload,
    GstRTPBuffer * rtp)
{
  GstRtpVRawDepay *rtpvrawdepay;
  guint8 *payload, *p0, *yp, *up, *vp, *headers;
  guint32 timestamp;
  guint cont, ystride, uvstride, pgroup, payload_len;
  gint width, height, xinc, yinc;
  GstVideoFrame *frame;
  gboolean marker;
  GstBuffer *outbuf = NULL;

  rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);

  timestamp = gst_rtp_buffer_get_timestamp (rtp);

  /* a changed RTP timestamp means a new frame started: flush what we have
   * and start assembling into a fresh output buffer */
  if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
    GstBuffer *new_buffer;
    GstFlowReturn ret;

    GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
    /* new timestamp, flush old buffer and create new output buffer */
    if (rtpvrawdepay->outbuf) {
      gst_video_frame_unmap (&rtpvrawdepay->frame);
      gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
      rtpvrawdepay->outbuf = NULL;
    }

    /* renegotiate the pool if downstream requested reconfiguration */
    if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
      GstCaps *caps;

      caps =
          gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
      gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
          &rtpvrawdepay->vinfo);
      gst_caps_unref (caps);
    }

    ret =
        gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &new_buffer, NULL);

    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto alloc_failed;

    /* clear timestamp from alloc... */
    GST_BUFFER_PTS (new_buffer) = -1;

    /* NO_REF: the frame does not take a reference, new_buffer ownership
     * stays with rtpvrawdepay->outbuf below */
    if (!gst_video_frame_map (&rtpvrawdepay->frame, &rtpvrawdepay->vinfo,
            new_buffer, GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
      gst_buffer_unref (new_buffer);
      goto invalid_frame;
    }

    rtpvrawdepay->outbuf = new_buffer;
    rtpvrawdepay->timestamp = timestamp;
  }

  frame = &rtpvrawdepay->frame;

  g_assert (frame->buffer != NULL);

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);

  /* copy negotiated layout parameters set up in setcaps() */
  pgroup = rtpvrawdepay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
  xinc = rtpvrawdepay->xinc;
  yinc = rtpvrawdepay->yinc;

  payload = gst_rtp_buffer_get_payload (rtp);
  payload_len = gst_rtp_buffer_get_payload_len (rtp);

  /* need at least the 2-byte extended seqnum plus something after it */
  if (payload_len < 3)
    goto short_packet;

  /* skip extended seqnum */
  payload += 2;
  payload_len -= 2;

  /* remember header position */
  headers = payload;

  gst_rtp_copy_video_meta (rtpvrawdepay, frame->buffer, rtp->buffer);

  /* find data start: walk the 6-byte (length, line, offset) headers; the
   * continuation bit (top bit of the offset field) marks more headers */
  do {
    if (payload_len < 6)
      goto short_packet;

    cont = payload[4] & 0x80;

    payload += 6;
    payload_len -= 6;
  } while (cont);

  /* now copy each header's span of scan-line data into the frame */
  while (TRUE) {
    guint length, line, offs, plen;
    guint8 *datap;

    /* stop when we run out of data */
    if (payload_len == 0)
      break;

    /* read length and cont. This should work because we iterated the headers
     * above. */
    length = (headers[0] << 8) | headers[1];
    line = ((headers[2] & 0x7f) << 8) | headers[3];
    offs = ((headers[4] & 0x7f) << 8) | headers[5];
    cont = headers[4] & 0x80;
    headers += 6;

    /* length must be a multiple of pgroup */
    if (length % pgroup != 0)
      goto wrong_length;

    /* never read past the end of the payload */
    if (length > payload_len)
      length = payload_len;

    /* sanity check */
    if (line > (height - yinc)) {
      GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
      goto next;
    }
    if (offs > (width - xinc)) {
      GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
      goto next;
    }

    /* calculate the maximum amount of bytes we can use per line */
    if (offs + ((length / pgroup) * xinc) > width) {
      plen = ((width - offs) * pgroup) / xinc;
      GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
          length, offs, plen);
    } else
      plen = length;

    GST_LOG_OBJECT (depayload,
        "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
        line, offs, payload_len);

    switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
      case GST_VIDEO_FORMAT_RGB:
      case GST_VIDEO_FORMAT_RGBA:
      case GST_VIDEO_FORMAT_BGR:
      case GST_VIDEO_FORMAT_BGRA:
      case GST_VIDEO_FORMAT_UYVY:
      case GST_VIDEO_FORMAT_UYVP:
        /* samples are packed just like gstreamer packs them */
        offs /= xinc;
        datap = p0 + (line * ystride) + (offs * pgroup);

        memcpy (datap, payload, plen);
        break;
      case GST_VIDEO_FORMAT_AYUV:
      {
        gint i;
        guint8 *p;

        datap = p0 + (line * ystride) + (offs * 4);
        p = payload;

        /* samples are packed in order Cb-Y-Cr for both interlaced and
         * progressive frames */
        for (i = 0; i < plen; i += pgroup) {
          *datap++ = 0;         /* alpha not transmitted on the wire */
          *datap++ = p[1];
          *datap++ = p[0];
          *datap++ = p[2];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_I420:
      {
        gint i;
        guint uvoff;
        guint8 *yd1p, *yd2p, *udp, *vdp, *p;

        /* one pgroup carries samples of two consecutive Y lines */
        yd1p = yp + (line * ystride) + (offs);
        yd2p = yd1p + ystride;
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ... */
        for (i = 0; i < plen; i += pgroup) {
          *yd1p++ = p[0];
          *yd1p++ = p[1];
          *yd2p++ = p[2];
          *yd2p++ = p[3];
          *udp++ = p[4];
          *vdp++ = p[5];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_Y41B:
      {
        gint i;
        guint uvoff;
        guint8 *ydp, *udp, *vdp, *p;

        ydp = yp + (line * ystride) + (offs);
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
         * and progressive scan lines */
        for (i = 0; i < plen; i += pgroup) {
          *udp++ = p[0];
          *ydp++ = p[1];
          *ydp++ = p[2];
          *vdp++ = p[3];
          *ydp++ = p[4];
          *ydp++ = p[5];
          p += pgroup;
        }
        break;
      }
      default:
        goto unknown_sampling;
    }

  next:
    /* last header consumed: remaining payload (if any) has no header */
    if (!cont)
      break;

    /* advance by the full (unclipped) length to stay in sync with headers */
    payload += length;
    payload_len -= length;
  }

  marker = gst_rtp_buffer_get_marker (rtp);

  /* the marker bit signals the last packet of a frame: hand the buffer out */
  if (marker) {
    GST_LOG_OBJECT (depayload, "marker, flushing frame");
    gst_video_frame_unmap (&rtpvrawdepay->frame);
    outbuf = rtpvrawdepay->outbuf;
    rtpvrawdepay->outbuf = NULL;
    rtpvrawdepay->timestamp = -1;
  }
  return outbuf;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    return NULL;
  }
alloc_failed:
  {
    GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
    return NULL;
  }
invalid_frame:
  {
    GST_ERROR_OBJECT (depayload, "could not map video frame");
    return NULL;
  }
wrong_length:
  {
    GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
    return NULL;
  }
short_packet:
  {
    GST_WARNING_OBJECT (depayload, "short packet");
    return NULL;
  }
}
609
610 static gboolean
gst_rtp_vraw_depay_handle_event(GstRTPBaseDepayload * filter,GstEvent * event)611 gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
612 {
613 gboolean ret;
614 GstRtpVRawDepay *rtpvrawdepay;
615
616 rtpvrawdepay = GST_RTP_VRAW_DEPAY (filter);
617
618 switch (GST_EVENT_TYPE (event)) {
619 case GST_EVENT_FLUSH_STOP:
620 gst_rtp_vraw_depay_reset (rtpvrawdepay, FALSE);
621 break;
622 default:
623 break;
624 }
625
626 ret =
627 GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
628
629 return ret;
630 }
631
632 static GstStateChangeReturn
gst_rtp_vraw_depay_change_state(GstElement * element,GstStateChange transition)633 gst_rtp_vraw_depay_change_state (GstElement * element,
634 GstStateChange transition)
635 {
636 GstRtpVRawDepay *rtpvrawdepay;
637 GstStateChangeReturn ret;
638
639 rtpvrawdepay = GST_RTP_VRAW_DEPAY (element);
640
641 switch (transition) {
642 case GST_STATE_CHANGE_NULL_TO_READY:
643 break;
644 case GST_STATE_CHANGE_READY_TO_PAUSED:
645 gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
646 break;
647 default:
648 break;
649 }
650
651 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
652
653 switch (transition) {
654 case GST_STATE_CHANGE_PAUSED_TO_READY:
655 gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
656 break;
657 case GST_STATE_CHANGE_READY_TO_NULL:
658 break;
659 default:
660 break;
661 }
662 return ret;
663 }
664