1 /* GStreamer
2 * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
18 */
19
20 #ifdef HAVE_CONFIG_H
21 # include "config.h"
22 #endif
23
24 #include <stdio.h>
25 #include <string.h>
26
27 #include <gst/base/gstbitreader.h>
28 #include <gst/rtp/gstrtpbuffer.h>
29 #include <gst/pbutils/pbutils.h>
30 #include <gst/video/video.h>
31 #include "gstrtpelements.h"
32 #include "gstrtph264depay.h"
33 #include "gstrtputils.h"
34
35 GST_DEBUG_CATEGORY_STATIC (rtph264depay_debug);
36 #define GST_CAT_DEFAULT (rtph264depay_debug)
37
38 /* This is what we'll default to when downstream hasn't
39 * expressed a restriction or preference via caps */
40 #define DEFAULT_BYTE_STREAM TRUE
41 #define DEFAULT_ACCESS_UNIT FALSE
42 #define DEFAULT_WAIT_FOR_KEYFRAME FALSE
43 #define DEFAULT_REQUEST_KEYFRAME FALSE
44
/* GObject property IDs */
enum
{
  PROP_0,
  PROP_WAIT_FOR_KEYFRAME,
  PROP_REQUEST_KEYFRAME,
};
51
52
/* 4-byte Annex B start code: three zero bytes followed by a one */
static const guint8 sync_bytes[] = { 0, 0, 0, 1 };
55
/* Output: either length-prefixed avc (AU aligned only) or start-code
 * prefixed byte-stream (NAL or AU aligned) */
static GstStaticPadTemplate gst_rtp_h264_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h264, "
        "stream-format = (string) avc, alignment = (string) au; "
        "video/x-h264, "
        "stream-format = (string) byte-stream, alignment = (string) { nal, au }")
    );
65
/* Input: RTP packets carrying H264 per RFC 3984 (90 kHz clock) */
static GstStaticPadTemplate gst_rtp_h264_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"H264\"")
        /* optional parameters */
    /* "profile-level-id = (string) ANY, " */
    /* "max-mbps = (string) ANY, " */
    /* "max-fs = (string) ANY, " */
    /* "max-cpb = (string) ANY, " */
    /* "max-dpb = (string) ANY, " */
    /* "max-br = (string) ANY, " */
    /* "redundant-pic-cap = (string) { \"0\", \"1\" }, " */
    /* "sprop-parameter-sets = (string) ANY, " */
    /* "parameter-add = (string) { \"0\", \"1\" }, " */
    /* "packetization-mode = (string) { \"0\", \"1\", \"2\" }, " */
    /* "sprop-interleaving-depth = (string) ANY, " */
    /* "sprop-deint-buf-req = (string) ANY, " */
    /* "deint-buf-cap = (string) ANY, " */
    /* "sprop-init-buf-time = (string) ANY, " */
    /* "sprop-max-don-diff = (string) ANY, " */
    /* "max-rcmd-nalu-size = (string) ANY " */
    );
91
92 #define gst_rtp_h264_depay_parent_class parent_class
93 G_DEFINE_TYPE_WITH_CODE (GstRtpH264Depay, gst_rtp_h264_depay,
94 GST_TYPE_RTP_BASE_DEPAYLOAD, GST_DEBUG_CATEGORY_INIT (rtph264depay_debug,
95 "rtph264depay", 0, "H264 Video RTP Depayloader"));
96 GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph264depay, "rtph264depay",
97 GST_RANK_SECONDARY, GST_TYPE_RTP_H264_DEPAY, rtp_element_init (plugin));
98
99 static void gst_rtp_h264_depay_finalize (GObject * object);
100
101 static GstStateChangeReturn gst_rtp_h264_depay_change_state (GstElement *
102 element, GstStateChange transition);
103
104 static GstBuffer *gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload,
105 GstRTPBuffer * rtp);
106 static gboolean gst_rtp_h264_depay_setcaps (GstRTPBaseDepayload * filter,
107 GstCaps * caps);
108 static gboolean gst_rtp_h264_depay_handle_event (GstRTPBaseDepayload * depay,
109 GstEvent * event);
110 static GstBuffer *gst_rtp_h264_complete_au (GstRtpH264Depay * rtph264depay,
111 GstClockTime * out_timestamp, gboolean * out_keyframe);
112 static void gst_rtp_h264_depay_push (GstRtpH264Depay * rtph264depay,
113 GstBuffer * outbuf, gboolean keyframe, GstClockTime timestamp,
114 gboolean marker);
115
116 static void
gst_rtp_h264_depay_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)117 gst_rtp_h264_depay_set_property (GObject * object, guint prop_id,
118 const GValue * value, GParamSpec * pspec)
119 {
120 GstRtpH264Depay *self = GST_RTP_H264_DEPAY (object);
121
122 switch (prop_id) {
123 case PROP_WAIT_FOR_KEYFRAME:
124 self->wait_for_keyframe = g_value_get_boolean (value);
125 break;
126 case PROP_REQUEST_KEYFRAME:
127 self->request_keyframe = g_value_get_boolean (value);
128 break;
129 default:
130 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
131 break;
132 }
133 }
134
135 static void
gst_rtp_h264_depay_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)136 gst_rtp_h264_depay_get_property (GObject * object, guint prop_id,
137 GValue * value, GParamSpec * pspec)
138 {
139 GstRtpH264Depay *self = GST_RTP_H264_DEPAY (object);
140
141 switch (prop_id) {
142 case PROP_WAIT_FOR_KEYFRAME:
143 g_value_set_boolean (value, self->wait_for_keyframe);
144 break;
145 case PROP_REQUEST_KEYFRAME:
146 g_value_set_boolean (value, self->request_keyframe);
147 break;
148 default:
149 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
150 break;
151 }
152 }
153
/* Class initializer: installs properties, pad templates, element metadata
 * and wires up the GObject / GstElement / GstRTPBaseDepayload vmethods. */
static void
gst_rtp_h264_depay_class_init (GstRtpH264DepayClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;

  gobject_class->finalize = gst_rtp_h264_depay_finalize;
  gobject_class->set_property = gst_rtp_h264_depay_set_property;
  gobject_class->get_property = gst_rtp_h264_depay_get_property;

  /**
   * GstRtpH264Depay:wait-for-keyframe:
   *
   * Wait for the next keyframe after packet loss,
   * meaningful only when outputting access units
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_WAIT_FOR_KEYFRAME,
      g_param_spec_boolean ("wait-for-keyframe", "Wait for Keyframe",
          "Wait for the next keyframe after packet loss, meaningful only when "
          "outputting access units",
          DEFAULT_WAIT_FOR_KEYFRAME,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstRtpH264Depay:request-keyframe:
   *
   * Request new keyframe when packet loss is detected
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_REQUEST_KEYFRAME,
      g_param_spec_boolean ("request-keyframe", "Request Keyframe",
          "Request new keyframe when packet loss is detected",
          DEFAULT_REQUEST_KEYFRAME,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h264_depay_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h264_depay_sink_template);

  gst_element_class_set_static_metadata (gstelement_class,
      "RTP H264 depayloader", "Codec/Depayloader/Network/RTP",
      "Extracts H264 video from RTP packets (RFC 3984)",
      "Wim Taymans <wim.taymans@gmail.com>");
  gstelement_class->change_state = gst_rtp_h264_depay_change_state;

  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_h264_depay_process;
  gstrtpbasedepayload_class->set_caps = gst_rtp_h264_depay_setcaps;
  gstrtpbasedepayload_class->handle_event = gst_rtp_h264_depay_handle_event;
}
212
/* Instance initializer: allocates the adapters and parameter-set arrays
 * and applies the default output format and property values. */
static void
gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay)
{
  /* adapter accumulates NAL fragments; picture_adapter accumulates the
   * NALs of the current access unit when merging to AU alignment */
  rtph264depay->adapter = gst_adapter_new ();
  rtph264depay->picture_adapter = gst_adapter_new ();
  rtph264depay->byte_stream = DEFAULT_BYTE_STREAM;
  rtph264depay->merge = DEFAULT_ACCESS_UNIT;
  /* the arrays own their SPS/PPS buffers and unref them on removal */
  rtph264depay->sps = g_ptr_array_new_with_free_func (
      (GDestroyNotify) gst_buffer_unref);
  rtph264depay->pps = g_ptr_array_new_with_free_func (
      (GDestroyNotify) gst_buffer_unref);
  rtph264depay->wait_for_keyframe = DEFAULT_WAIT_FOR_KEYFRAME;
  rtph264depay->request_keyframe = DEFAULT_REQUEST_KEYFRAME;
}
227
/* Reset the depayloading state (adapters, parameter sets, FU tracking).
 * When @hard is TRUE, additionally drop the negotiated allocator and
 * reset the allocation params. */
static void
gst_rtp_h264_depay_reset (GstRtpH264Depay * rtph264depay, gboolean hard)
{
  gst_adapter_clear (rtph264depay->adapter);
  rtph264depay->wait_start = TRUE;
  /* re-arm keyframe waiting according to the configured property */
  rtph264depay->waiting_for_keyframe = rtph264depay->wait_for_keyframe;
  gst_adapter_clear (rtph264depay->picture_adapter);
  rtph264depay->picture_start = FALSE;
  rtph264depay->last_keyframe = FALSE;
  rtph264depay->last_ts = 0;
  rtph264depay->current_fu_type = 0;
  rtph264depay->new_codec_data = FALSE;
  /* free-func on the arrays unrefs the stored SPS/PPS buffers */
  g_ptr_array_set_size (rtph264depay->sps, 0);
  g_ptr_array_set_size (rtph264depay->pps, 0);

  if (hard) {
    if (rtph264depay->allocator != NULL) {
      gst_object_unref (rtph264depay->allocator);
      rtph264depay->allocator = NULL;
    }
    gst_allocation_params_init (&rtph264depay->params);
  }
}
251
252 static void
gst_rtp_h264_depay_drain(GstRtpH264Depay * rtph264depay)253 gst_rtp_h264_depay_drain (GstRtpH264Depay * rtph264depay)
254 {
255 GstClockTime timestamp;
256 gboolean keyframe;
257 GstBuffer *outbuf;
258
259 if (!rtph264depay->picture_start)
260 return;
261
262 outbuf = gst_rtp_h264_complete_au (rtph264depay, ×tamp, &keyframe);
263 if (outbuf)
264 gst_rtp_h264_depay_push (rtph264depay, outbuf, keyframe, timestamp, FALSE);
265 }
266
267 static void
gst_rtp_h264_depay_finalize(GObject * object)268 gst_rtp_h264_depay_finalize (GObject * object)
269 {
270 GstRtpH264Depay *rtph264depay;
271
272 rtph264depay = GST_RTP_H264_DEPAY (object);
273
274 if (rtph264depay->codec_data)
275 gst_buffer_unref (rtph264depay->codec_data);
276
277 g_object_unref (rtph264depay->adapter);
278 g_object_unref (rtph264depay->picture_adapter);
279
280 g_ptr_array_free (rtph264depay->sps, TRUE);
281 g_ptr_array_free (rtph264depay->pps, TRUE);
282
283 G_OBJECT_CLASS (parent_class)->finalize (object);
284 }
285
286 static void
gst_rtp_h264_depay_negotiate(GstRtpH264Depay * rtph264depay)287 gst_rtp_h264_depay_negotiate (GstRtpH264Depay * rtph264depay)
288 {
289 GstCaps *caps;
290 gint byte_stream = -1;
291 gint merge = -1;
292
293 caps =
294 gst_pad_get_allowed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph264depay));
295
296 GST_DEBUG_OBJECT (rtph264depay, "allowed caps: %" GST_PTR_FORMAT, caps);
297
298 if (caps) {
299 if (gst_caps_get_size (caps) > 0) {
300 GstStructure *s = gst_caps_get_structure (caps, 0);
301 const gchar *str = NULL;
302
303 if ((str = gst_structure_get_string (s, "stream-format"))) {
304 if (strcmp (str, "avc") == 0) {
305 byte_stream = FALSE;
306 } else if (strcmp (str, "byte-stream") == 0) {
307 byte_stream = TRUE;
308 } else {
309 GST_DEBUG_OBJECT (rtph264depay, "unknown stream-format: %s", str);
310 }
311 }
312
313 if ((str = gst_structure_get_string (s, "alignment"))) {
314 if (strcmp (str, "au") == 0) {
315 merge = TRUE;
316 } else if (strcmp (str, "nal") == 0) {
317 merge = FALSE;
318 } else {
319 GST_DEBUG_OBJECT (rtph264depay, "unknown alignment: %s", str);
320 }
321 }
322 }
323 gst_caps_unref (caps);
324 }
325
326 if (byte_stream != -1) {
327 GST_DEBUG_OBJECT (rtph264depay, "downstream requires byte-stream %d",
328 byte_stream);
329 rtph264depay->byte_stream = byte_stream;
330 } else {
331 GST_DEBUG_OBJECT (rtph264depay, "defaulting to byte-stream %d",
332 DEFAULT_BYTE_STREAM);
333 rtph264depay->byte_stream = DEFAULT_BYTE_STREAM;
334 }
335 if (merge != -1) {
336 GST_DEBUG_OBJECT (rtph264depay, "downstream requires merge %d", merge);
337 rtph264depay->merge = merge;
338 } else {
339 GST_DEBUG_OBJECT (rtph264depay, "defaulting to merge %d",
340 DEFAULT_ACCESS_UNIT);
341 rtph264depay->merge = DEFAULT_ACCESS_UNIT;
342 }
343 }
344
345 static gboolean
parse_sps(GstMapInfo * map,guint32 * sps_id)346 parse_sps (GstMapInfo * map, guint32 * sps_id)
347 {
348 GstBitReader br = GST_BIT_READER_INIT (map->data + 4,
349 map->size - 4);
350
351 if (map->size < 5)
352 return FALSE;
353
354 if (!gst_rtp_read_golomb (&br, sps_id))
355 return FALSE;
356
357 return TRUE;
358 }
359
360 static gboolean
parse_pps(GstMapInfo * map,guint32 * sps_id,guint32 * pps_id)361 parse_pps (GstMapInfo * map, guint32 * sps_id, guint32 * pps_id)
362 {
363 GstBitReader br = GST_BIT_READER_INIT (map->data + 1,
364 map->size - 1);
365
366 if (map->size < 2)
367 return FALSE;
368
369 if (!gst_rtp_read_golomb (&br, pps_id))
370 return FALSE;
371 if (!gst_rtp_read_golomb (&br, sps_id))
372 return FALSE;
373
374 return TRUE;
375 }
376
377 static gboolean
gst_rtp_h264_depay_set_output_caps(GstRtpH264Depay * rtph264depay,GstCaps * caps)378 gst_rtp_h264_depay_set_output_caps (GstRtpH264Depay * rtph264depay,
379 GstCaps * caps)
380 {
381 GstAllocationParams params;
382 GstAllocator *allocator = NULL;
383 GstPad *srcpad;
384 gboolean res;
385
386 gst_allocation_params_init (¶ms);
387
388 srcpad = GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph264depay);
389 res = gst_pad_set_caps (srcpad, caps);
390 if (res) {
391 GstQuery *query;
392
393 query = gst_query_new_allocation (caps, TRUE);
394 if (!gst_pad_peer_query (srcpad, query)) {
395 GST_DEBUG_OBJECT (rtph264depay, "downstream ALLOCATION query failed");
396 }
397
398 if (gst_query_get_n_allocation_params (query) > 0) {
399 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
400 }
401
402 gst_query_unref (query);
403 }
404
405 if (rtph264depay->allocator)
406 gst_object_unref (rtph264depay->allocator);
407
408 rtph264depay->allocator = allocator;
409 rtph264depay->params = params;
410
411 return res;
412 }
413
/* Update the src pad caps from the negotiated output format and the
 * collected SPS/PPS.
 *
 * In avc mode this builds an avcC codec_data blob (ISO/IEC 14496-15)
 * from the stored parameter sets and attaches it to the caps. In
 * byte-stream mode it instead prepares a start-code prefixed SPS/PPS
 * buffer that gst_rtp_h264_depay_push() will prepend to the next output
 * buffer. Returns FALSE only when setting the caps downstream failed. */
static gboolean
gst_rtp_h264_set_src_caps (GstRtpH264Depay * rtph264depay)
{
  gboolean res = TRUE;
  GstCaps *srccaps;
  GstCaps *old_caps;
  GstPad *srcpad;

  /* in avc mode we cannot produce codec_data until we have fresh SPS
   * and PPS; nothing to do yet in that case */
  if (!rtph264depay->byte_stream &&
      (!rtph264depay->new_codec_data ||
          rtph264depay->sps->len == 0 || rtph264depay->pps->len == 0))
    return TRUE;

  srccaps = gst_caps_new_simple ("video/x-h264",
      "stream-format", G_TYPE_STRING,
      rtph264depay->byte_stream ? "byte-stream" : "avc",
      "alignment", G_TYPE_STRING, rtph264depay->merge ? "au" : "nal", NULL);

  if (!rtph264depay->byte_stream) {
    GstBuffer *codec_data;
    GstMapInfo map;
    GstMapInfo nalmap;
    guint8 *data;
    guint len;
    guint new_size;
    guint i, first_sps, num_sps, first_pps, num_pps;
    guchar level = 0;
    guchar profile_compat = G_MAXUINT8;

    /* start with 7 bytes header */
    len = 7;
    /* count sps & pps: each entry is a 16-bit length prefix + NAL bytes */
    for (i = 0; i < rtph264depay->sps->len; i++)
      len += 2 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->sps, i));
    for (i = 0; i < rtph264depay->pps->len; i++)
      len += 2 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->pps, i));

    codec_data = gst_buffer_new_and_alloc (len);
    gst_buffer_map (codec_data, &map, GST_MAP_READWRITE);
    data = map.data;

    /* 8 bits version == 1 */
    *data++ = 1;

    /* According to: ISO/IEC 14496-15:2004(E) section 5.2.4.1
     * The level is the max level of all SPSes
     * A profile compat bit can only be set if all SPSes include that bit
     */
    for (i = 0; i < rtph264depay->sps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->sps, i), &nalmap,
          GST_MAP_READ);
      profile_compat &= nalmap.data[2];
      level = MAX (level, nalmap.data[3]);
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, i), &nalmap);
    }

    /* Assume all SPSes use the same profile, so extract from the first SPS */
    gst_buffer_map (g_ptr_array_index (rtph264depay->sps, 0), &nalmap,
        GST_MAP_READ);
    *data++ = nalmap.data[1];
    gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, 0), &nalmap);
    *data++ = profile_compat;
    *data++ = level;

    /* 6 bits reserved | 2 bits lengthSizeMinusOne (0x3 = 4-byte lengths) */
    *data++ = 0xff;

    /* avcC can hold at most 31 SPS (5-bit count); keep the most recent */
    if (rtph264depay->sps->len > 31) {
      GST_WARNING_OBJECT (rtph264depay,
          "Too many SPS to put in codec_data. Sending the most recent 31");
      num_sps = 31;
      first_sps = rtph264depay->sps->len - 31;
    } else {
      num_sps = rtph264depay->sps->len;
      first_sps = 0;
    }

    /* 3 bits reserved | 5 bits numOfSequenceParameterSets */
    *data++ = 0xe0 | (num_sps & 0x1f);

    /* copy all SPS */
    for (i = first_sps; i < rtph264depay->sps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->sps, i), &nalmap,
          GST_MAP_READ);

      GST_DEBUG_OBJECT (rtph264depay, "copy SPS %d of length %u", i,
          (guint) nalmap.size);
      GST_WRITE_UINT16_BE (data, nalmap.size);
      data += 2;
      memcpy (data, nalmap.data, nalmap.size);
      data += nalmap.size;
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, i), &nalmap);
    }

    /* avcC can hold at most 255 PPS (8-bit count); keep the most recent */
    if (rtph264depay->pps->len > 255) {
      GST_WARNING_OBJECT (rtph264depay,
          "Too many PPS to put in codec_data. Sending the most recent 255");
      num_pps = 255;
      first_pps = rtph264depay->pps->len - 255;
    } else {
      num_pps = rtph264depay->pps->len;
      first_pps = 0;
    }

    /* 8 bits numOfPictureParameterSets */
    *data++ = num_pps;

    /* copy all PPS */
    for (i = first_pps; i < rtph264depay->pps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->pps, i), &nalmap,
          GST_MAP_READ);

      GST_DEBUG_OBJECT (rtph264depay, "copy PPS %d of length %u", i,
          (guint) nalmap.size);
      GST_WRITE_UINT16_BE (data, nalmap.size);
      data += 2;
      memcpy (data, nalmap.data, nalmap.size);
      data += nalmap.size;
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->pps, i), &nalmap);
    }

    /* shrink to the bytes actually written (len was an upper bound when
     * SPS/PPS were truncated above) */
    new_size = data - map.data;
    gst_buffer_unmap (codec_data, &map);
    gst_buffer_set_size (codec_data, new_size);

    gst_caps_set_simple (srccaps,
        "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
    gst_buffer_unref (codec_data);
  }

  /* Set profile and level from SPS */
  {
    gint i;
    GstBuffer *max_level_sps = NULL;
    gint level = 0;
    GstMapInfo nalmap;

    /* Get the SPS with the highest level. We assume
     * all SPS have the same profile */
    for (i = 0; i < rtph264depay->sps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->sps, i), &nalmap,
          GST_MAP_READ);
      if (level == 0 || level < nalmap.data[3]) {
        max_level_sps = g_ptr_array_index (rtph264depay->sps, i);
        level = nalmap.data[3];
      }
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, i), &nalmap);
    }

    if (max_level_sps) {
      gst_buffer_map (max_level_sps, &nalmap, GST_MAP_READ);
      /* skip the NAL header byte: the helper expects the SPS payload */
      gst_codec_utils_h264_caps_set_level_and_profile (srccaps, nalmap.data + 1,
          nalmap.size - 1);
      gst_buffer_unmap (max_level_sps, &nalmap);
    }
  }

  srcpad = GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph264depay);

  old_caps = gst_pad_get_current_caps (srcpad);

  /* only renegotiate when the caps actually changed */
  if (old_caps == NULL || !gst_caps_is_equal (srccaps, old_caps)) {
    res = gst_rtp_h264_depay_set_output_caps (rtph264depay, srccaps);
  }

  gst_clear_caps (&old_caps);
  gst_caps_unref (srccaps);

  /* Insert SPS and PPS into the stream on next opportunity (if bytestream) */
  if (rtph264depay->byte_stream
      && (rtph264depay->sps->len > 0 || rtph264depay->pps->len > 0)) {
    gint i;
    GstBuffer *codec_data;
    GstMapInfo map;
    guint8 *data;
    guint len = 0;

    /* each NAL gets a 4-byte start code (or length) prefix */
    for (i = 0; i < rtph264depay->sps->len; i++) {
      len += 4 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->sps, i));
    }

    for (i = 0; i < rtph264depay->pps->len; i++) {
      len += 4 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->pps, i));
    }

    codec_data = gst_buffer_new_and_alloc (len);
    gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
    data = map.data;

    for (i = 0; i < rtph264depay->sps->len; i++) {
      GstBuffer *sps_buf = g_ptr_array_index (rtph264depay->sps, i);
      guint sps_size = gst_buffer_get_size (sps_buf);

      /* byte_stream is always TRUE in this branch, so the start code
       * path is taken */
      if (rtph264depay->byte_stream)
        memcpy (data, sync_bytes, sizeof (sync_bytes));
      else
        GST_WRITE_UINT32_BE (data, sps_size);
      gst_buffer_extract (sps_buf, 0, data + 4, -1);
      data += 4 + sps_size;
    }

    for (i = 0; i < rtph264depay->pps->len; i++) {
      GstBuffer *pps_buf = g_ptr_array_index (rtph264depay->pps, i);
      guint pps_size = gst_buffer_get_size (pps_buf);

      if (rtph264depay->byte_stream)
        memcpy (data, sync_bytes, sizeof (sync_bytes));
      else
        GST_WRITE_UINT32_BE (data, pps_size);
      gst_buffer_extract (pps_buf, 0, data + 4, -1);
      data += 4 + pps_size;
    }

    gst_buffer_unmap (codec_data, &map);
    if (rtph264depay->codec_data)
      gst_buffer_unref (rtph264depay->codec_data);
    rtph264depay->codec_data = codec_data;
  }

  if (res)
    rtph264depay->new_codec_data = FALSE;

  return res;
}
638
/* Store an SPS (NAL type 7) or PPS (NAL type 8) in the matching array,
 * replacing any previous entry that has the same parameter-set id.
 *
 * Takes ownership of @nal: it is added to an array when kept, or unreffed
 * when dropped (duplicate, unparsable, or not an SPS/PPS).
 *
 * Returns TRUE if the stored parameter sets changed (codec_data needs to
 * be regenerated), FALSE otherwise. Shared with the payloader, hence the
 * generic GstElement argument and external linkage. */
gboolean
gst_rtp_h264_add_sps_pps (GstElement * rtph264, GPtrArray * sps_array,
    GPtrArray * pps_array, GstBuffer * nal)
{
  GstMapInfo map;
  guchar type;
  guint i;

  gst_buffer_map (nal, &map, GST_MAP_READ);

  /* NAL unit type is the low 5 bits of the first byte */
  type = map.data[0] & 0x1f;

  if (type == 7) {
    guint32 sps_id;

    if (!parse_sps (&map, &sps_id)) {
      GST_WARNING_OBJECT (rtph264, "Invalid SPS,"
          " can't parse seq_parameter_set_id");
      goto drop;
    }

    /* look for an existing SPS with the same id */
    for (i = 0; i < sps_array->len; i++) {
      GstBuffer *sps = g_ptr_array_index (sps_array, i);
      GstMapInfo spsmap;
      guint32 tmp_sps_id;

      gst_buffer_map (sps, &spsmap, GST_MAP_READ);
      parse_sps (&spsmap, &tmp_sps_id);

      if (sps_id == tmp_sps_id) {
        /* If this is already the most recent SPS and unchanged, nothing to do */
        if (i == (sps_array->len - 1) && map.size == spsmap.size &&
            memcmp (map.data, spsmap.data, spsmap.size) == 0) {
          GST_LOG_OBJECT (rtph264,
              "Unchanged SPS %u already most recent, not updating", sps_id);
          gst_buffer_unmap (sps, &spsmap);
          goto drop;
        } else {
          /* same id but different content (or not most recent):
           * removal unrefs the old buffer via the array's free-func */
          gst_buffer_unmap (sps, &spsmap);
          g_ptr_array_remove_index (sps_array, i);
          g_ptr_array_add (sps_array, nal);
          GST_LOG_OBJECT (rtph264, "Modified SPS %u, replacing", sps_id);
          goto done;
        }
      }
      gst_buffer_unmap (sps, &spsmap);
    }
    GST_LOG_OBJECT (rtph264, "Adding new SPS %u", sps_id);
    g_ptr_array_add (sps_array, nal);
  } else if (type == 8) {
    guint32 sps_id;
    guint32 pps_id;

    if (!parse_pps (&map, &sps_id, &pps_id)) {
      GST_WARNING_OBJECT (rtph264, "Invalid PPS,"
          " can't parse seq_parameter_set_id or pic_parameter_set_id");
      goto drop;
    }

    /* look for an existing PPS with the same id */
    for (i = 0; i < pps_array->len; i++) {
      GstBuffer *pps = g_ptr_array_index (pps_array, i);
      GstMapInfo ppsmap;
      guint32 tmp_sps_id;
      guint32 tmp_pps_id;


      gst_buffer_map (pps, &ppsmap, GST_MAP_READ);
      parse_pps (&ppsmap, &tmp_sps_id, &tmp_pps_id);

      if (pps_id == tmp_pps_id) {
        /* If this is already the most recent PPS and unchanged, nothing to do */
        if (i == (pps_array->len - 1) && map.size == ppsmap.size &&
            memcmp (map.data, ppsmap.data, ppsmap.size) == 0) {
          GST_LOG_OBJECT (rtph264,
              "Unchanged PPS %u:%u already most recent, not updating", sps_id,
              pps_id);
          gst_buffer_unmap (pps, &ppsmap);
          goto drop;
        } else {
          gst_buffer_unmap (pps, &ppsmap);
          g_ptr_array_remove_index (pps_array, i);
          g_ptr_array_add (pps_array, nal);
          GST_LOG_OBJECT (rtph264, "Modified PPS %u:%u, replacing",
              sps_id, pps_id);
          goto done;
        }
      }
      gst_buffer_unmap (pps, &ppsmap);
    }
    GST_LOG_OBJECT (rtph264, "Adding new PPS %u:%i", sps_id, pps_id);
    g_ptr_array_add (pps_array, nal);
  } else {
    /* not a parameter set: ignore */
    goto drop;
  }

done:
  gst_buffer_unmap (nal, &map);

  return TRUE;

drop:
  /* @nal was not stored: release our ownership */
  gst_buffer_unmap (nal, &map);
  gst_buffer_unref (nal);

  return FALSE;
}
745
746
747 static void
gst_rtp_h264_depay_add_sps_pps(GstRtpH264Depay * rtph264depay,GstBuffer * nal)748 gst_rtp_h264_depay_add_sps_pps (GstRtpH264Depay * rtph264depay, GstBuffer * nal)
749 {
750 if (gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264depay),
751 rtph264depay->sps, rtph264depay->pps, nal))
752 rtph264depay->new_codec_data = TRUE;
753 }
754
/* Handle the sink caps: read the RTP clock-rate, decode the optional
 * base64-encoded "sprop-parameter-sets" (comma separated SPS/PPS NALs),
 * negotiate the output format with downstream and update the src caps.
 *
 * Returns FALSE only if setting the src caps failed; missing parameter
 * sets are not an error (setcaps is retried once they arrive in-band). */
static gboolean
gst_rtp_h264_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  gint clock_rate;
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  GstRtpH264Depay *rtph264depay;
  const gchar *ps;
  GstBuffer *codec_data;
  GstMapInfo map;
  guint8 *ptr;

  rtph264depay = GST_RTP_H264_DEPAY (depayload);

  /* fall back to the template's fixed 90 kHz clock */
  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;
  depayload->clock_rate = clock_rate;

  /* Base64 encoded, comma separated config NALs */
  ps = gst_structure_get_string (structure, "sprop-parameter-sets");

  /* negotiate with downstream w.r.t. output format and alignment */
  gst_rtp_h264_depay_negotiate (rtph264depay);

  if (rtph264depay->byte_stream && ps != NULL) {
    /* for bytestream we only need the parameter sets but we don't error out
     * when they are not there, we assume they are in the stream. */
    gchar **params;
    guint len, total;
    gint i;

    params = g_strsplit (ps, ",", 0);

    /* count total number of bytes in base64. Also include the sync bytes in
     * front of the params. */
    len = 0;
    for (i = 0; params[i]; i++) {
      len += strlen (params[i]);
      len += sizeof (sync_bytes);
    }
    /* we seriously overshoot the length, but it's fine. */
    codec_data = gst_buffer_new_and_alloc (len);

    gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
    ptr = map.data;
    total = 0;
    for (i = 0; params[i]; i++) {
      guint save = 0;
      gint state = 0;

      GST_DEBUG_OBJECT (depayload, "decoding param %d (%s)", i, params[i]);
      /* each decoded NAL is prefixed with the 4-byte start code */
      memcpy (ptr, sync_bytes, sizeof (sync_bytes));
      ptr += sizeof (sync_bytes);
      len =
          g_base64_decode_step (params[i], strlen (params[i]), ptr, &state,
          &save);
      GST_DEBUG_OBJECT (depayload, "decoded %d bytes", len);
      total += len + sizeof (sync_bytes);
      ptr += len;
    }
    gst_buffer_unmap (codec_data, &map);
    /* trim the overshoot down to the bytes actually written */
    gst_buffer_resize (codec_data, 0, total);
    g_strfreev (params);

    /* keep the codec_data, we need to send it as the first buffer. We cannot
     * push it in the adapter because the adapter might be flushed on discont.
     */
    if (rtph264depay->codec_data)
      gst_buffer_unref (rtph264depay->codec_data);
    rtph264depay->codec_data = codec_data;
  } else if (!rtph264depay->byte_stream) {
    gchar **params;
    gint i;

    /* avc mode needs the parameter sets up front to build codec_data */
    if (ps == NULL)
      goto incomplete_caps;

    params = g_strsplit (ps, ",", 0);

    GST_DEBUG_OBJECT (depayload, "we have %d params", g_strv_length (params));

    /* start with 7 bytes header */
    for (i = 0; params[i]; i++) {
      GstBuffer *nal;
      GstMapInfo nalmap;
      gsize nal_len;
      guint save = 0;
      gint state = 0;

      nal_len = strlen (params[i]);
      if (nal_len == 0) {
        GST_WARNING_OBJECT (depayload, "empty param '%s' (#%d)", params[i], i);
        continue;
      }
      nal = gst_buffer_new_and_alloc (nal_len);
      gst_buffer_map (nal, &nalmap, GST_MAP_READWRITE);

      nal_len =
          g_base64_decode_step (params[i], nal_len, nalmap.data, &state, &save);

      GST_DEBUG_OBJECT (depayload, "adding param %d as %s", i,
          ((nalmap.data[0] & 0x1f) == 7) ? "SPS" : "PPS");

      gst_buffer_unmap (nal, &nalmap);
      gst_buffer_set_size (nal, nal_len);

      /* ownership of nal transfers to the SPS/PPS store */
      gst_rtp_h264_depay_add_sps_pps (rtph264depay, nal);
    }
    g_strfreev (params);

    if (rtph264depay->sps->len == 0 || rtph264depay->pps->len == 0)
      goto incomplete_caps;
  }

  return gst_rtp_h264_set_src_caps (rtph264depay);

  /* ERRORS */
incomplete_caps:
  {
    GST_DEBUG_OBJECT (depayload, "we have incomplete caps,"
        " doing setcaps later");
    return TRUE;
  }
}
878
879 static GstBuffer *
gst_rtp_h264_depay_allocate_output_buffer(GstRtpH264Depay * depay,gsize size)880 gst_rtp_h264_depay_allocate_output_buffer (GstRtpH264Depay * depay, gsize size)
881 {
882 GstBuffer *buffer = NULL;
883
884 g_return_val_if_fail (size > 0, NULL);
885
886 GST_LOG_OBJECT (depay, "want output buffer of %u bytes", (guint) size);
887
888 buffer = gst_buffer_new_allocate (depay->allocator, size, &depay->params);
889 if (buffer == NULL) {
890 GST_INFO_OBJECT (depay, "couldn't allocate output buffer");
891 buffer = gst_buffer_new_allocate (NULL, size, NULL);
892 }
893
894 return buffer;
895 }
896
897 static GstBuffer *
gst_rtp_h264_complete_au(GstRtpH264Depay * rtph264depay,GstClockTime * out_timestamp,gboolean * out_keyframe)898 gst_rtp_h264_complete_au (GstRtpH264Depay * rtph264depay,
899 GstClockTime * out_timestamp, gboolean * out_keyframe)
900 {
901 GstBufferList *list;
902 GstMapInfo outmap;
903 GstBuffer *outbuf;
904 guint outsize, offset = 0;
905 gint b, n_bufs, m, n_mem;
906
907 /* we had a picture in the adapter and we completed it */
908 GST_DEBUG_OBJECT (rtph264depay, "taking completed AU");
909 outsize = gst_adapter_available (rtph264depay->picture_adapter);
910
911 outbuf = gst_rtp_h264_depay_allocate_output_buffer (rtph264depay, outsize);
912
913 if (outbuf == NULL)
914 return NULL;
915
916 if (!gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE))
917 return NULL;
918
919 list = gst_adapter_take_buffer_list (rtph264depay->picture_adapter, outsize);
920
921 n_bufs = gst_buffer_list_length (list);
922 for (b = 0; b < n_bufs; ++b) {
923 GstBuffer *buf = gst_buffer_list_get (list, b);
924
925 n_mem = gst_buffer_n_memory (buf);
926 for (m = 0; m < n_mem; ++m) {
927 GstMemory *mem = gst_buffer_peek_memory (buf, m);
928 gsize mem_size = gst_memory_get_sizes (mem, NULL, NULL);
929 GstMapInfo mem_map;
930
931 if (gst_memory_map (mem, &mem_map, GST_MAP_READ)) {
932 memcpy (outmap.data + offset, mem_map.data, mem_size);
933 gst_memory_unmap (mem, &mem_map);
934 } else {
935 memset (outmap.data + offset, 0, mem_size);
936 }
937 offset += mem_size;
938 }
939
940 gst_rtp_copy_video_meta (rtph264depay, outbuf, buf);
941 }
942 gst_buffer_list_unref (list);
943 gst_buffer_unmap (outbuf, &outmap);
944
945 *out_timestamp = rtph264depay->last_ts;
946 *out_keyframe = rtph264depay->last_keyframe;
947
948 rtph264depay->last_keyframe = FALSE;
949 rtph264depay->picture_start = FALSE;
950
951 return outbuf;
952 }
953
/* Push @outbuf downstream after prepending any pending codec_data
 * (start-code prefixed SPS/PPS prepared by gst_rtp_h264_set_src_caps or
 * setcaps) and applying the timestamp and buffer flags.
 * Takes ownership of @outbuf. */
static void
gst_rtp_h264_depay_push (GstRtpH264Depay * rtph264depay, GstBuffer * outbuf,
    gboolean keyframe, GstClockTime timestamp, gboolean marker)
{
  /* prepend codec_data */
  if (rtph264depay->codec_data) {
    GST_DEBUG_OBJECT (rtph264depay, "prepending codec_data");
    gst_rtp_copy_video_meta (rtph264depay, rtph264depay->codec_data, outbuf);
    /* gst_buffer_append takes ownership of both buffers */
    outbuf = gst_buffer_append (rtph264depay->codec_data, outbuf);
    rtph264depay->codec_data = NULL;
    /* the buffer now begins with SPS/PPS, so treat it as a sync point */
    keyframe = TRUE;
  }
  outbuf = gst_buffer_make_writable (outbuf);

  gst_rtp_drop_non_video_meta (rtph264depay, outbuf);

  GST_BUFFER_PTS (outbuf) = timestamp;

  /* DELTA_UNIT cleared on keyframes, set otherwise */
  if (keyframe)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

  if (marker)
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER);

  gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtph264depay), outbuf);
}
982
/* SPS/PPS/IDR considered key, all others DELTA;
 * so downstream waiting for keyframe can pick up at SPS/PPS/IDR */
/* nt is an H.264 NAL unit type: 5 = IDR slice, 7 = SPS, 8 = PPS */
#define NAL_TYPE_IS_KEY(nt) (((nt) == 5) || ((nt) == 7) || ((nt) == 8))
986
/* Process one complete NAL unit buffer. @nal carries a 4-byte prefix
 * (start-code in byte-stream mode, placeholder/length otherwise) followed
 * by the NAL unit itself. Depending on configuration this either pushes
 * the NAL straight downstream (no merge), or accumulates NALs in the
 * picture adapter until an access-unit boundary is detected (merge).
 * Takes ownership of @nal in all paths. */
static void
gst_rtp_h264_depay_handle_nal (GstRtpH264Depay * rtph264depay, GstBuffer * nal,
    GstClockTime in_timestamp, gboolean marker)
{
  GstRTPBaseDepayload *depayload = GST_RTP_BASE_DEPAYLOAD (rtph264depay);
  gint nal_type;
  GstMapInfo map;
  GstBuffer *outbuf = NULL;
  GstClockTime out_timestamp;
  gboolean keyframe, out_keyframe;

  /* need at least the 4-byte prefix plus the 1-byte NAL header */
  gst_buffer_map (nal, &map, GST_MAP_READ);
  if (G_UNLIKELY (map.size < 5))
    goto short_nal;

  /* NAL header is the first byte after the prefix; low 5 bits are the type */
  nal_type = map.data[4] & 0x1f;
  GST_DEBUG_OBJECT (rtph264depay, "handle NAL type %d", nal_type);

  keyframe = NAL_TYPE_IS_KEY (nal_type);

  out_keyframe = keyframe;
  out_timestamp = in_timestamp;

  if (!rtph264depay->byte_stream) {
    /* AVC mode: SPS (7) and PPS (8) go into codec_data rather than the
     * stream; stash them (without the 4-byte prefix) and stop here */
    if (nal_type == 7 || nal_type == 8) {
      gst_rtp_h264_depay_add_sps_pps (rtph264depay,
          gst_buffer_copy_region (nal, GST_BUFFER_COPY_ALL,
              4, gst_buffer_get_size (nal) - 4));
      gst_buffer_unmap (nal, &map);
      gst_buffer_unref (nal);
      return;
    } else if (rtph264depay->sps->len == 0 || rtph264depay->pps->len == 0) {
      /* Don't push down any buffer in non-bytestream mode if the SPS/PPS
       * haven't gone through yet; instead ask upstream for a key unit
       * (with all headers) and drop this NAL. */
      gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
          gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
              gst_structure_new ("GstForceKeyUnit",
                  "all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
      gst_buffer_unmap (nal, &map);
      gst_buffer_unref (nal);
      return;
    }

    /* parameter sets changed and we have a complete set: renegotiate caps */
    if (rtph264depay->new_codec_data &&
        rtph264depay->sps->len > 0 && rtph264depay->pps->len > 0)
      gst_rtp_h264_set_src_caps (rtph264depay);
  }


  if (rtph264depay->merge) {
    gboolean start = FALSE, complete = FALSE;

    /* consider a coded slices (IDR or not) to start a picture,
     * (so ending the previous one) if first_mb_in_slice == 0
     * (non-0 is part of previous one) */
    /* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4,
     * but in practice it works in sane cases, needs not much parsing,
     * and also works with broken frame_num in NAL (where spec-wise would fail) */
    /* FIXME: this code isn't correct for interlaced content as AUs should be
     * constructed with pairs of fields and the guess here will just push out
     * AUs with a single field in it */
    if (nal_type == 1 || nal_type == 2 || nal_type == 5) {
      /* we have a picture start */
      start = TRUE;
      /* NOTE(review): reads map.data[5]; the guard above only ensures
       * map.size >= 5, so this assumes slice NALs always carry at least
       * one payload byte after the header — TODO confirm */
      if (map.data[5] & 0x80) {
        /* first_mb_in_slice == 0 completes a picture
         * (ue(v)-coded 0 starts with a leading 1 bit) */
        complete = TRUE;
      }
    } else if (nal_type >= 6 && nal_type <= 9) {
      /* SEI, SPS, PPS, AU terminate picture */
      complete = TRUE;
    }
    GST_DEBUG_OBJECT (depayload, "start %d, complete %d", start, complete);

    /* marker bit isn't mandatory so in the following code we try to guess
     * an AU boundary by detecting a new picture start */
    if (!marker) {
      if (complete && rtph264depay->picture_start)
        outbuf = gst_rtp_h264_complete_au (rtph264depay, &out_timestamp,
            &out_keyframe);
    }
    /* add to adapter */
    gst_buffer_unmap (nal, &map);

    /* a keyframe picture start clears any wait-for-keyframe condition */
    if (!rtph264depay->picture_start && start && out_keyframe)
      rtph264depay->waiting_for_keyframe = FALSE;

    GST_DEBUG_OBJECT (depayload, "adding NAL to picture adapter");
    gst_adapter_push (rtph264depay->picture_adapter, nal);
    rtph264depay->last_ts = in_timestamp;
    rtph264depay->last_keyframe |= keyframe;
    rtph264depay->picture_start |= start;

    /* RTP marker bit explicitly signals end of the access unit */
    if (marker)
      outbuf = gst_rtp_h264_complete_au (rtph264depay, &out_timestamp,
          &out_keyframe);
  } else {
    /* no merge, output is input nal */
    GST_DEBUG_OBJECT (depayload, "using NAL as output");
    outbuf = nal;
    gst_buffer_unmap (nal, &map);
  }

  if (outbuf) {
    if (!rtph264depay->waiting_for_keyframe) {
      gst_rtp_h264_depay_push (rtph264depay, outbuf, out_keyframe,
          out_timestamp, marker);
    } else {
      GST_LOG_OBJECT (depayload,
          "Dropping %" GST_PTR_FORMAT ", we are waiting for a keyframe",
          outbuf);
      gst_buffer_unref (outbuf);
    }
  }

  return;

  /* ERRORS */
short_nal:
  {
    GST_WARNING_OBJECT (depayload, "dropping short NAL");
    gst_buffer_unmap (nal, &map);
    gst_buffer_unref (nal);
    return;
  }
}
1114
1115 static void
gst_rtp_h264_finish_fragmentation_unit(GstRtpH264Depay * rtph264depay)1116 gst_rtp_h264_finish_fragmentation_unit (GstRtpH264Depay * rtph264depay)
1117 {
1118 guint outsize;
1119 GstMapInfo map;
1120 GstBuffer *outbuf;
1121
1122 outsize = gst_adapter_available (rtph264depay->adapter);
1123 outbuf = gst_adapter_take_buffer (rtph264depay->adapter, outsize);
1124
1125 gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
1126 GST_DEBUG_OBJECT (rtph264depay, "output %d bytes", outsize);
1127
1128 if (rtph264depay->byte_stream) {
1129 memcpy (map.data, sync_bytes, sizeof (sync_bytes));
1130 } else {
1131 outsize -= 4;
1132 map.data[0] = (outsize >> 24);
1133 map.data[1] = (outsize >> 16);
1134 map.data[2] = (outsize >> 8);
1135 map.data[3] = (outsize);
1136 }
1137 gst_buffer_unmap (outbuf, &map);
1138
1139 rtph264depay->current_fu_type = 0;
1140
1141 gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf,
1142 rtph264depay->fu_timestamp, rtph264depay->fu_marker);
1143 }
1144
/* Depayload one RTP packet. Dispatches on the NAL unit type found in the
 * first payload byte (RFC 6184): STAP-A/STAP-B aggregation packets are
 * split into individual NAL units, FU-A/FU-B fragmentation units are
 * reassembled in an adapter, and single NAL unit packets are forwarded
 * as-is. Each complete NAL is handed to gst_rtp_h264_depay_handle_nal(),
 * which takes care of pushing downstream, so this function always
 * returns NULL to the base class. */
static GstBuffer *
gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
{
  GstRtpH264Depay *rtph264depay;
  GstBuffer *outbuf = NULL;
  guint8 nal_unit_type;

  rtph264depay = GST_RTP_H264_DEPAY (depayload);

  /* wait-for-keyframe only makes sense when we aggregate AUs */
  if (!rtph264depay->merge)
    rtph264depay->waiting_for_keyframe = FALSE;

  /* flush remaining data on discont */
  if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
    gst_adapter_clear (rtph264depay->adapter);
    rtph264depay->wait_start = TRUE;
    rtph264depay->current_fu_type = 0;
    rtph264depay->last_fu_seqnum = 0;

    if (rtph264depay->merge && rtph264depay->wait_for_keyframe) {
      rtph264depay->waiting_for_keyframe = TRUE;
    }

    /* optionally ask upstream to produce a new keyframe after the gap */
    if (rtph264depay->request_keyframe)
      gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
              TRUE, 0));
  }

  {
    gint payload_len;
    guint8 *payload;
    guint header_len;
    guint8 nal_ref_idc;
    GstMapInfo map;
    guint outsize, nalu_size;
    GstClockTime timestamp;
    gboolean marker;

    timestamp = GST_BUFFER_PTS (rtp->buffer);

    payload_len = gst_rtp_buffer_get_payload_len (rtp);
    payload = gst_rtp_buffer_get_payload (rtp);
    marker = gst_rtp_buffer_get_marker (rtp);

    GST_DEBUG_OBJECT (rtph264depay, "receiving %d bytes", payload_len);

    if (payload_len == 0)
      goto empty_packet;

    /* +---------------+
     * |0|1|2|3|4|5|6|7|
     * +-+-+-+-+-+-+-+-+
     * |F|NRI|  Type   |
     * +---------------+
     *
     * F must be 0.
     */
    nal_ref_idc = (payload[0] & 0x60) >> 5;
    nal_unit_type = payload[0] & 0x1f;

    /* at least one byte header with type */
    header_len = 1;

    GST_DEBUG_OBJECT (rtph264depay, "NRI %d, Type %d %s", nal_ref_idc,
        nal_unit_type, marker ? "marker" : "");

    /* If FU unit was being processed, but the current nal is of a different
     * type.  Assume that the remote payloader is buggy (didn't set the end bit
     * when the FU ended) and send out what we gathered thusfar */
    if (G_UNLIKELY (rtph264depay->current_fu_type != 0 &&
            nal_unit_type != rtph264depay->current_fu_type))
      gst_rtp_h264_finish_fragmentation_unit (rtph264depay);

    switch (nal_unit_type) {
      case 0:
      case 30:
      case 31:
        /* undefined */
        goto undefined_type;
      case 25:
        /* STAP-B    Single-time aggregation packet     5.7.1 */
        /* 2 byte extra header for DON */
        header_len += 2;
        /* fallthrough */
      case 24:
      {
        /* strip headers */
        payload += header_len;
        payload_len -= header_len;

        rtph264depay->wait_start = FALSE;

        /* STAP-A    Single-time aggregation packet     5.7.1
         * each aggregated NAL is preceded by a 16-bit size field */
        while (payload_len > 2) {
          gboolean last = FALSE;

          /*                      1
           *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
           * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
           * |         NALU Size             |
           * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
           */
          nalu_size = (payload[0] << 8) | payload[1];

          /* don't include nalu_size — clamp to what is actually present */
          if (nalu_size > (payload_len - 2))
            nalu_size = payload_len - 2;

          /* reserve 4 bytes in front for start code / AVC length */
          outsize = nalu_size + sizeof (sync_bytes);
          outbuf = gst_buffer_new_and_alloc (outsize);

          gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
          if (rtph264depay->byte_stream) {
            memcpy (map.data, sync_bytes, sizeof (sync_bytes));
          } else {
            /* reuse the STAP 16-bit size as the low half of the AVC length */
            map.data[0] = map.data[1] = 0;
            map.data[2] = payload[0];
            map.data[3] = payload[1];
          }

          /* strip NALU size */
          payload += 2;
          payload_len -= 2;

          memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
          gst_buffer_unmap (outbuf, &map);

          gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);

          /* marker is only meaningful on the last NAL of the aggregate */
          if (payload_len - nalu_size <= 2)
            last = TRUE;

          gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp,
              marker && last);

          payload += nalu_size;
          payload_len -= nalu_size;
        }
        break;
      }
      case 26:
        /* MTAP16    Multi-time aggregation packet      5.7.2 */
        // header_len = 5;
        /* fallthrough, not implemented */
      case 27:
        /* MTAP24    Multi-time aggregation packet      5.7.2 */
        // header_len = 6;
        goto not_implemented;
        break;
      case 28:
      case 29:
      {
        /* FU-A      Fragmentation unit                 5.8 */
        /* FU-B      Fragmentation unit                 5.8 */
        gboolean S, E;

        /* +---------------+
         * |0|1|2|3|4|5|6|7|
         * +-+-+-+-+-+-+-+-+
         * |S|E|R|  Type   |
         * +---------------+
         *
         * R is reserved and always 0
         */
        S = (payload[1] & 0x80) == 0x80;
        E = (payload[1] & 0x40) == 0x40;

        GST_DEBUG_OBJECT (rtph264depay, "S %d, E %d", S, E);

        /* after a discont we must see a start fragment first */
        if (rtph264depay->wait_start && !S)
          goto waiting_start;

        if (S) {
          /* NAL unit starts here */
          guint8 nal_header;

          /* If a new FU unit started, while still processing an older one.
           * Assume that the remote payloader is buggy (doesn't set the end
           * bit) and send out what we've gathered thusfar */
          if (G_UNLIKELY (rtph264depay->current_fu_type != 0))
            gst_rtp_h264_finish_fragmentation_unit (rtph264depay);

          rtph264depay->current_fu_type = nal_unit_type;
          rtph264depay->fu_timestamp = timestamp;
          rtph264depay->last_fu_seqnum = gst_rtp_buffer_get_seq (rtp);

          rtph264depay->wait_start = FALSE;

          /* reconstruct NAL header: F/NRI from the FU indicator, type from
           * the FU header */
          nal_header = (payload[0] & 0xe0) | (payload[1] & 0x1f);

          /* strip type header, keep FU header, we'll reuse it to reconstruct
           * the NAL header. */
          payload += 1;
          payload_len -= 1;

          /* reserve 4 bytes in front; they are filled in when the FU is
           * finished (see gst_rtp_h264_finish_fragmentation_unit) */
          nalu_size = payload_len;
          outsize = nalu_size + sizeof (sync_bytes);
          outbuf = gst_buffer_new_and_alloc (outsize);

          gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
          memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
          map.data[sizeof (sync_bytes)] = nal_header;
          gst_buffer_unmap (outbuf, &map);

          gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);

          GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);

          /* and assemble in the adapter */
          gst_adapter_push (rtph264depay->adapter, outbuf);
        } else {
          if (rtph264depay->current_fu_type == 0) {
            /* previous FU packet missing start bit? */
            GST_WARNING_OBJECT (rtph264depay, "missing FU start bit on an "
                "earlier packet. Dropping.");
            gst_adapter_clear (rtph264depay->adapter);
            return NULL;
          }
          if (gst_rtp_buffer_compare_seqnum (rtph264depay->last_fu_seqnum,
                  gst_rtp_buffer_get_seq (rtp)) != 1) {
            /* jump in sequence numbers within an FU is cause for discarding */
            GST_WARNING_OBJECT (rtph264depay, "Jump in sequence numbers from "
                "%u to %u within Fragmentation Unit. Data was lost, dropping "
                "stored.", rtph264depay->last_fu_seqnum,
                gst_rtp_buffer_get_seq (rtp));
            gst_adapter_clear (rtph264depay->adapter);
            return NULL;
          }
          rtph264depay->last_fu_seqnum = gst_rtp_buffer_get_seq (rtp);

          /* strip off FU indicator and FU header bytes */
          payload += 2;
          payload_len -= 2;

          outsize = payload_len;
          outbuf = gst_buffer_new_and_alloc (outsize);
          gst_buffer_fill (outbuf, 0, payload, outsize);

          gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);

          GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);

          /* and assemble in the adapter */
          gst_adapter_push (rtph264depay->adapter, outbuf);
        }

        outbuf = NULL;
        rtph264depay->fu_marker = marker;

        /* if NAL unit ends, flush the adapter */
        if (E)
          gst_rtp_h264_finish_fragmentation_unit (rtph264depay);
        break;
      }
      default:
      {
        rtph264depay->wait_start = FALSE;

        /* 1-23   NAL unit  Single NAL unit packet per H.264   5.6 */
        /* the entire payload is the output buffer */
        nalu_size = payload_len;
        outsize = nalu_size + sizeof (sync_bytes);
        outbuf = gst_buffer_new_and_alloc (outsize);

        gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
        if (rtph264depay->byte_stream) {
          memcpy (map.data, sync_bytes, sizeof (sync_bytes));
        } else {
          map.data[0] = map.data[1] = 0;
          map.data[2] = nalu_size >> 8;
          map.data[3] = nalu_size & 0xff;
        }
        memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
        gst_buffer_unmap (outbuf, &map);

        gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);

        gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp, marker);
        break;
      }
    }
  }

  return NULL;

  /* ERRORS */
empty_packet:
  {
    GST_DEBUG_OBJECT (rtph264depay, "empty packet");
    return NULL;
  }
undefined_type:
  {
    GST_ELEMENT_WARNING (rtph264depay, STREAM, DECODE,
        (NULL), ("Undefined packet type"));
    return NULL;
  }
waiting_start:
  {
    GST_DEBUG_OBJECT (rtph264depay, "waiting for start");
    return NULL;
  }
not_implemented:
  {
    GST_ELEMENT_ERROR (rtph264depay, STREAM, FORMAT,
        (NULL), ("NAL unit type %d not supported yet", nal_unit_type));
    return NULL;
  }
}
1458
1459 static gboolean
gst_rtp_h264_depay_handle_event(GstRTPBaseDepayload * depay,GstEvent * event)1460 gst_rtp_h264_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event)
1461 {
1462 GstRtpH264Depay *rtph264depay;
1463
1464 rtph264depay = GST_RTP_H264_DEPAY (depay);
1465
1466 switch (GST_EVENT_TYPE (event)) {
1467 case GST_EVENT_FLUSH_STOP:
1468 gst_rtp_h264_depay_reset (rtph264depay, FALSE);
1469 break;
1470 case GST_EVENT_EOS:
1471 gst_rtp_h264_depay_drain (rtph264depay);
1472 break;
1473 default:
1474 break;
1475 }
1476
1477 return
1478 GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, event);
1479 }
1480
1481 static GstStateChangeReturn
gst_rtp_h264_depay_change_state(GstElement * element,GstStateChange transition)1482 gst_rtp_h264_depay_change_state (GstElement * element,
1483 GstStateChange transition)
1484 {
1485 GstRtpH264Depay *rtph264depay;
1486 GstStateChangeReturn ret;
1487
1488 rtph264depay = GST_RTP_H264_DEPAY (element);
1489
1490 switch (transition) {
1491 case GST_STATE_CHANGE_NULL_TO_READY:
1492 break;
1493 case GST_STATE_CHANGE_READY_TO_PAUSED:
1494 gst_rtp_h264_depay_reset (rtph264depay, TRUE);
1495 break;
1496 default:
1497 break;
1498 }
1499
1500 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1501
1502 switch (transition) {
1503 case GST_STATE_CHANGE_PAUSED_TO_READY:
1504 gst_rtp_h264_depay_reset (rtph264depay, TRUE);
1505 break;
1506 case GST_STATE_CHANGE_READY_TO_NULL:
1507 break;
1508 default:
1509 break;
1510 }
1511 return ret;
1512 }
1513