1 /* ex: set tabstop=2 shiftwidth=2 expandtab: */
2 /* GStreamer
3  *
4  * This library is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Library General Public
6  * License as published by the Free Software Foundation; either
7  * version 2 of the License, or (at your option) any later version.
8  *
9  * This library is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12  * Library General Public License for more details.
13  *
14  * You should have received a copy of the GNU Library General Public
15  * License along with this library; if not, write to the
16  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17  * Boston, MA 02110-1301, USA.
18  */
19 
20 #ifdef HAVE_CONFIG_H
21 #  include "config.h"
22 #endif
23 
24 #include <string.h>
25 #include <stdlib.h>
26 
27 #include <gst/rtp/gstrtpbuffer.h>
28 #include <gst/pbutils/pbutils.h>
29 #include <gst/video/video.h>
30 
31 /* Included to not duplicate gst_rtp_h264_add_sps_pps () */
32 #include "gstrtph264depay.h"
33 
34 #include "gstrtpelements.h"
35 #include "gstrtph264pay.h"
36 #include "gstrtputils.h"
37 #include "gstbuffermemory.h"
38 
39 
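/* H.264 NAL unit type codes (ITU-T H.264, Table 7-1) and the RFC 6184
 * aggregation (STAP-A) / fragmentation (FU-A) packet types used below. */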
40 #define IDR_TYPE_ID    5
41 #define SPS_TYPE_ID    7
42 #define PPS_TYPE_ID    8
43 #define AUD_TYPE_ID    9
44 #define STAP_A_TYPE_ID 24
45 #define FU_A_TYPE_ID   28
46 
47 GST_DEBUG_CATEGORY_STATIC (rtph264pay_debug);
48 #define GST_CAT_DEFAULT (rtph264pay_debug)
49 
50 #define GST_TYPE_RTP_H264_AGGREGATE_MODE \
51   (gst_rtp_h264_aggregate_mode_get_type ())
52 
53 
54 static GType
55 gst_rtp_h264_aggregate_mode_get_type (void)
56 {
57   static GType type = 0;
58   static const GEnumValue values[] = {
59     {GST_RTP_H264_AGGREGATE_NONE, "Do not aggregate NAL units", "none"},
60     {GST_RTP_H264_AGGREGATE_ZERO_LATENCY,
61         "Aggregate NAL units until a VCL unit is included", "zero-latency"},
62     {GST_RTP_H264_AGGREGATE_MAX_STAP,
63         "Aggregate all NAL units with the same timestamp (adds one frame of"
64           " latency)", "max-stap"},
65     {0, NULL, NULL},
66   };
67 
68   if (!type) {
69     type = g_enum_register_static ("GstRtpH264AggregateMode", values);
70   }
71   return type;
72 }
73 
74 
75 
76 /* references:
77  *
78  * RFC 3984
79  */
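/* Illustrative sender pipeline (not part of this element; x264enc and
 * udpsink come from other GStreamer modules):
 *
 *   gst-launch-1.0 videotestsrc ! x264enc tune=zerolatency ! \
 *       rtph264pay config-interval=-1 ! udpsink host=127.0.0.1 port=5004
 */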
80 
81 static GstStaticPadTemplate gst_rtp_h264_pay_sink_template =
82     GST_STATIC_PAD_TEMPLATE ("sink",
83     GST_PAD_SINK,
84     GST_PAD_ALWAYS,
85     GST_STATIC_CAPS ("video/x-h264, "
86         "stream-format = (string) avc, alignment = (string) au;"
87         "video/x-h264, "
88         "stream-format = (string) byte-stream, alignment = (string) { nal, au }")
89     );
90 
91 static GstStaticPadTemplate gst_rtp_h264_pay_src_template =
92 GST_STATIC_PAD_TEMPLATE ("src",
93     GST_PAD_SRC,
94     GST_PAD_ALWAYS,
95     GST_STATIC_CAPS ("application/x-rtp, "
96         "media = (string) \"video\", "
97         "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
98         "clock-rate = (int) 90000, " "encoding-name = (string) \"H264\"")
99     );
100 
101 #define DEFAULT_SPROP_PARAMETER_SETS    NULL
102 #define DEFAULT_CONFIG_INTERVAL         0
103 #define DEFAULT_AGGREGATE_MODE          GST_RTP_H264_AGGREGATE_NONE
104 
105 enum
106 {
107   PROP_0,
108   PROP_SPROP_PARAMETER_SETS,
109   PROP_CONFIG_INTERVAL,
110   PROP_AGGREGATE_MODE,
111 };
112 
113 static void gst_rtp_h264_pay_finalize (GObject * object);
114 
115 static void gst_rtp_h264_pay_set_property (GObject * object, guint prop_id,
116     const GValue * value, GParamSpec * pspec);
117 static void gst_rtp_h264_pay_get_property (GObject * object, guint prop_id,
118     GValue * value, GParamSpec * pspec);
119 
120 static GstCaps *gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload,
121     GstPad * pad, GstCaps * filter);
122 static gboolean gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload,
123     GstCaps * caps);
124 static GstFlowReturn gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * pad,
125     GstBuffer * buffer);
126 static gboolean gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload,
127     GstEvent * event);
128 static GstStateChangeReturn gst_rtp_h264_pay_change_state (GstElement *
129     element, GstStateChange transition);
130 static gboolean gst_rtp_h264_pay_src_query (GstPad * pad, GstObject * parent,
131     GstQuery * query);
132 
133 static void gst_rtp_h264_pay_reset_bundle (GstRtpH264Pay * rtph264pay);
134 
135 #define gst_rtp_h264_pay_parent_class parent_class
136 G_DEFINE_TYPE (GstRtpH264Pay, gst_rtp_h264_pay, GST_TYPE_RTP_BASE_PAYLOAD);
137 GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph264pay, "rtph264pay",
138     GST_RANK_SECONDARY, GST_TYPE_RTP_H264_PAY, rtp_element_init (plugin));
139 
140 static void
141 gst_rtp_h264_pay_class_init (GstRtpH264PayClass * klass)
142 {
143   GObjectClass *gobject_class;
144   GstElementClass *gstelement_class;
145   GstRTPBasePayloadClass *gstrtpbasepayload_class;
146 
147   gobject_class = (GObjectClass *) klass;
148   gstelement_class = (GstElementClass *) klass;
149   gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
150 
151   gobject_class->set_property = gst_rtp_h264_pay_set_property;
152   gobject_class->get_property = gst_rtp_h264_pay_get_property;
153 
154   g_object_class_install_property (G_OBJECT_CLASS (klass),
155       PROP_SPROP_PARAMETER_SETS, g_param_spec_string ("sprop-parameter-sets",
156           "sprop-parameter-sets",
157           "The base64 sprop-parameter-sets to set in the out caps (set to "
158           "NULL to extract from stream)",
159           DEFAULT_SPROP_PARAMETER_SETS,
160           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
161 
162   g_object_class_install_property (G_OBJECT_CLASS (klass),
163       PROP_CONFIG_INTERVAL,
164       g_param_spec_int ("config-interval",
165           "SPS PPS Send Interval",
166           "SPS and PPS insertion interval in seconds (sprop parameter sets "
167           "will be multiplexed in the data stream when detected) "
168           "(0 = disabled, -1 = send with every IDR frame)",
169           -1, 3600, DEFAULT_CONFIG_INTERVAL,
170           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
171       );
172 
173   /**
174    * GstRtpH264Pay:aggregate-mode:
175    *
176    * Bundle suitable SPS/PPS NAL units into STAP-A aggregate packets.
177    *
178    * This can potentially reduce RTP packetization overhead but not all
179    * RTP implementations handle it correctly.
180    *
181    * For best compatibility, it is recommended to set this to "none" (the
182    * default) for RTSP, and to "zero-latency" for WebRTC.
183    *
184    * Since: 1.18
185    */
186   g_object_class_install_property (G_OBJECT_CLASS (klass),
187       PROP_AGGREGATE_MODE,
188       g_param_spec_enum ("aggregate-mode",
189           "Attempt to use aggregate packets",
190           "Bundle suitable SPS/PPS NAL units into STAP-A "
191           "aggregate packets",
192           GST_TYPE_RTP_H264_AGGREGATE_MODE,
193           DEFAULT_AGGREGATE_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
194       );
195 
196   gobject_class->finalize = gst_rtp_h264_pay_finalize;
197 
198   gst_element_class_add_static_pad_template (gstelement_class,
199       &gst_rtp_h264_pay_src_template);
200   gst_element_class_add_static_pad_template (gstelement_class,
201       &gst_rtp_h264_pay_sink_template);
202 
203   gst_element_class_set_static_metadata (gstelement_class, "RTP H264 payloader",
204       "Codec/Payloader/Network/RTP",
205       "Payload-encode H264 video into RTP packets (RFC 3984)",
206       "Laurent Glayal <spglegle@yahoo.fr>");
207 
208   gstelement_class->change_state =
209       GST_DEBUG_FUNCPTR (gst_rtp_h264_pay_change_state);
210 
211   gstrtpbasepayload_class->get_caps = gst_rtp_h264_pay_getcaps;
212   gstrtpbasepayload_class->set_caps = gst_rtp_h264_pay_setcaps;
213   gstrtpbasepayload_class->handle_buffer = gst_rtp_h264_pay_handle_buffer;
214   gstrtpbasepayload_class->sink_event = gst_rtp_h264_pay_sink_event;
215 
216   GST_DEBUG_CATEGORY_INIT (rtph264pay_debug, "rtph264pay", 0,
217       "H264 RTP Payloader");
218 
219   gst_type_mark_as_plugin_api (GST_TYPE_RTP_H264_AGGREGATE_MODE, 0);
220 }
221 
222 static void
223 gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay)
224 {
225   rtph264pay->queue = g_array_new (FALSE, FALSE, sizeof (guint));
226   rtph264pay->profile_level = 0;
227   rtph264pay->sps = g_ptr_array_new_with_free_func (
228       (GDestroyNotify) gst_buffer_unref);
229   rtph264pay->pps = g_ptr_array_new_with_free_func (
230       (GDestroyNotify) gst_buffer_unref);
231   rtph264pay->last_spspps = -1;
232   rtph264pay->spspps_interval = DEFAULT_CONFIG_INTERVAL;
233   rtph264pay->aggregate_mode = DEFAULT_AGGREGATE_MODE;
234   rtph264pay->delta_unit = FALSE;
235   rtph264pay->discont = FALSE;
236 
237   rtph264pay->adapter = gst_adapter_new ();
238 
239   gst_pad_set_query_function (GST_RTP_BASE_PAYLOAD_SRCPAD (rtph264pay),
240       gst_rtp_h264_pay_src_query);
241 }
242 
243 static void
244 gst_rtp_h264_pay_clear_sps_pps (GstRtpH264Pay * rtph264pay)
245 {
246   g_ptr_array_set_size (rtph264pay->sps, 0);
247   g_ptr_array_set_size (rtph264pay->pps, 0);
248 }
249 
250 static void
251 gst_rtp_h264_pay_finalize (GObject * object)
252 {
253   GstRtpH264Pay *rtph264pay;
254 
255   rtph264pay = GST_RTP_H264_PAY (object);
256 
257   g_array_free (rtph264pay->queue, TRUE);
258 
259   g_ptr_array_free (rtph264pay->sps, TRUE);
260   g_ptr_array_free (rtph264pay->pps, TRUE);
261 
262   g_free (rtph264pay->sprop_parameter_sets);
263 
264   g_object_unref (rtph264pay->adapter);
265   gst_rtp_h264_pay_reset_bundle (rtph264pay);
266 
267   G_OBJECT_CLASS (parent_class)->finalize (object);
268 }
269 
270 static const gchar all_levels[][4] = {
271   "1",
272   "1b",
273   "1.1",
274   "1.2",
275   "1.3",
276   "2",
277   "2.1",
278   "2.2",
279   "3",
280   "3.1",
281   "3.2",
282   "4",
283   "4.1",
284   "4.2",
285   "5",
286   "5.1"
287 };
288 
289 static GstCaps *
290 gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
291     GstCaps * filter)
292 {
293   GstCaps *template_caps;
294   GstCaps *allowed_caps;
295   GstCaps *caps, *icaps;
296   gboolean append_unrestricted;
297   guint i;
298 
299   allowed_caps =
300       gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
301 
302   if (allowed_caps == NULL)
303     return NULL;
304 
305   template_caps =
306       gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);
307 
308   if (gst_caps_is_any (allowed_caps)) {
309     caps = gst_caps_ref (template_caps);
310     goto done;
311   }
312 
313   if (gst_caps_is_empty (allowed_caps)) {
314     caps = gst_caps_ref (allowed_caps);
315     goto done;
316   }
317 
318   caps = gst_caps_new_empty ();
319 
320   append_unrestricted = FALSE;
321   for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
322     GstStructure *s = gst_caps_get_structure (allowed_caps, i);
323     GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
324     const gchar *profile_level_id, *profile;
325 
326     profile_level_id = gst_structure_get_string (s, "profile-level-id");
327 
328     if (profile_level_id && strlen (profile_level_id) == 6) {
329       const gchar *profile;
330       const gchar *level;
331       long int spsint;
332       guint8 sps[3];
333 
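      /* profile-level-id (RFC 3984/6184) is six hex digits holding
       * profile_idc, the constraint/compatibility flags and level_idc,
       * i.e. the three bytes that follow the SPS NAL header. */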
334       spsint = strtol (profile_level_id, NULL, 16);
335       sps[0] = spsint >> 16;
336       sps[1] = spsint >> 8;
337       sps[2] = spsint;
338 
339       profile = gst_codec_utils_h264_get_profile (sps, 3);
340       level = gst_codec_utils_h264_get_level (sps, 3);
341 
342       if (profile && level) {
343         GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
344             profile, level);
345 
346         if (!strcmp (profile, "constrained-baseline")) {
347           gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
348         } else {
349           GValue val = { 0, };
350           GValue profiles = { 0, };
351 
352           g_value_init (&profiles, GST_TYPE_LIST);
353           g_value_init (&val, G_TYPE_STRING);
354 
355           g_value_set_static_string (&val, profile);
356           gst_value_list_append_value (&profiles, &val);
357 
358           g_value_set_static_string (&val, "constrained-baseline");
359           gst_value_list_append_value (&profiles, &val);
360 
361           gst_structure_take_value (new_s, "profile", &profiles);
362         }
363 
364         if (!strcmp (level, "1"))
365           gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
366         else {
367           GValue levels = { 0, };
368           GValue val = { 0, };
369           int j;
370 
371           g_value_init (&levels, GST_TYPE_LIST);
372           g_value_init (&val, G_TYPE_STRING);
373 
374           for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
375             g_value_set_static_string (&val, all_levels[j]);
376             gst_value_list_prepend_value (&levels, &val);
377             if (!strcmp (level, all_levels[j]))
378               break;
379           }
380           gst_structure_take_value (new_s, "level", &levels);
381         }
382       } else {
383         /* Invalid profile-level-id means baseline */
384 
385         gst_structure_set (new_s,
386             "profile", G_TYPE_STRING, "constrained-baseline", NULL);
387       }
388     } else if ((profile = gst_structure_get_string (s, "profile"))) {
389       gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
390     } else {
391       /* No profile-level-id means baseline or unrestricted */
392 
393       gst_structure_set (new_s,
394           "profile", G_TYPE_STRING, "constrained-baseline", NULL);
395       append_unrestricted = TRUE;
396     }
397 
398     caps = gst_caps_merge_structure (caps, new_s);
399   }
400 
401   if (append_unrestricted) {
402     caps =
403         gst_caps_merge_structure (caps, gst_structure_new ("video/x-h264", NULL,
404             NULL));
405   }
406 
407   icaps = gst_caps_intersect (caps, template_caps);
408   gst_caps_unref (caps);
409   caps = icaps;
410 
411 done:
412   if (filter) {
413     GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
414         GST_PTR_FORMAT, caps, filter);
415     icaps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
416     gst_caps_unref (caps);
417     caps = icaps;
418   }
419 
420   gst_caps_unref (template_caps);
421   gst_caps_unref (allowed_caps);
422 
423   GST_LOG_OBJECT (payload, "returning caps %" GST_PTR_FORMAT, caps);
424   return caps;
425 }
426 
427 static gboolean
428 gst_rtp_h264_pay_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
429 {
430   GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (parent);
431 
432   if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
433     gboolean retval;
434     gboolean live;
435     GstClockTime min_latency, max_latency;
436 
437     retval = gst_pad_query_default (pad, parent, query);
438     if (!retval)
439       return retval;
440 
441     if (rtph264pay->stream_format == GST_H264_STREAM_FORMAT_UNKNOWN ||
442         rtph264pay->alignment == GST_H264_ALIGNMENT_UNKNOWN)
443       return FALSE;
444 
445     gst_query_parse_latency (query, &live, &min_latency, &max_latency);
446 
447     if (rtph264pay->aggregate_mode == GST_RTP_H264_AGGREGATE_MAX_STAP &&
448         rtph264pay->alignment != GST_H264_ALIGNMENT_AU && rtph264pay->fps_num) {
449       GstClockTime one_frame = gst_util_uint64_scale_int (GST_SECOND,
450           rtph264pay->fps_denum, rtph264pay->fps_num);
451 
452       min_latency += one_frame;
453       max_latency += one_frame;
454       gst_query_set_latency (query, live, min_latency, max_latency);
455     }
456     return TRUE;
457   }
458 
459   return gst_pad_query_default (pad, parent, query);
460 }
461 
462 /* take the currently configured SPS and PPS lists and set them on the caps as
463  * sprop-parameter-sets */
464 static gboolean
465 gst_rtp_h264_pay_set_sps_pps (GstRTPBasePayload * basepayload)
466 {
467   GstStructure *s = gst_structure_new_empty ("unused");
468   GstRtpH264Pay *payloader = GST_RTP_H264_PAY (basepayload);
469   gchar *set;
470   GString *sprops;
471   guint count;
472   gboolean res;
473   GstMapInfo map;
474   guint i;
475 
476   sprops = g_string_new ("");
477   count = 0;
478 
479   /* build the sprop-parameter-sets */
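  /* per RFC 3984/6184 the sprop-parameter-sets value is a comma-separated
   * list of base64-encoded SPS and PPS NAL units (without start codes) */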
480   for (i = 0; i < payloader->sps->len; i++) {
481     GstBuffer *sps_buf =
482         GST_BUFFER_CAST (g_ptr_array_index (payloader->sps, i));
483 
484     gst_buffer_map (sps_buf, &map, GST_MAP_READ);
485     set = g_base64_encode (map.data, map.size);
486     gst_buffer_unmap (sps_buf, &map);
487 
488     g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
489     g_free (set);
490     count++;
491   }
492   for (i = 0; i < payloader->pps->len; i++) {
493     GstBuffer *pps_buf =
494         GST_BUFFER_CAST (g_ptr_array_index (payloader->pps, i));
495 
496     gst_buffer_map (pps_buf, &map, GST_MAP_READ);
497     set = g_base64_encode (map.data, map.size);
498     gst_buffer_unmap (pps_buf, &map);
499 
500     g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
501     g_free (set);
502     count++;
503   }
504 
505   if (G_LIKELY (count)) {
506     gchar *profile_level;
507 
508     gst_structure_set (s,
509         "packetization-mode", G_TYPE_STRING, "1",
510         "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL);
511 
512     if (payloader->profile_level != 0) {
513       guint8 sps[2] = {
514         payloader->profile_level >> 16,
515         payloader->profile_level >> 8,
516       };
517 
518       profile_level =
519           g_strdup_printf ("%06x", payloader->profile_level & 0xffffff);
520       gst_structure_set (s,
521           "profile-level-id", G_TYPE_STRING, profile_level,
522           "profile", G_TYPE_STRING, gst_codec_utils_h264_get_profile (sps, 2),
523           NULL);
524 
525       g_free (profile_level);
526     }
527 
528     /* combine into output caps */
529     res = gst_rtp_base_payload_set_outcaps_structure (basepayload, s);
530   } else {
531     res = gst_rtp_base_payload_set_outcaps (basepayload, NULL);
532   }
533   gst_structure_free (s);
534   g_string_free (sprops, TRUE);
535 
536   return res;
537 }
538 
539 
540 static gboolean
541 gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
542 {
543   GstRtpH264Pay *rtph264pay;
544   GstStructure *str;
545   const GValue *value;
546   GstMapInfo map;
547   guint8 *data;
548   gsize size;
549   GstBuffer *buffer;
550   const gchar *alignment, *stream_format;
551 
552   rtph264pay = GST_RTP_H264_PAY (basepayload);
553 
554   str = gst_caps_get_structure (caps, 0);
555 
556   /* we can only set the output caps when we found the sprops and profile
557    * NALs */
558   gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "H264", 90000);
559 
560   rtph264pay->alignment = GST_H264_ALIGNMENT_UNKNOWN;
561   alignment = gst_structure_get_string (str, "alignment");
562   if (alignment) {
563     if (g_str_equal (alignment, "au"))
564       rtph264pay->alignment = GST_H264_ALIGNMENT_AU;
565     if (g_str_equal (alignment, "nal"))
566       rtph264pay->alignment = GST_H264_ALIGNMENT_NAL;
567   }
568 
569   rtph264pay->stream_format = GST_H264_STREAM_FORMAT_UNKNOWN;
570   stream_format = gst_structure_get_string (str, "stream-format");
571   if (stream_format) {
572     if (g_str_equal (stream_format, "avc"))
573       rtph264pay->stream_format = GST_H264_STREAM_FORMAT_AVC;
574     if (g_str_equal (stream_format, "byte-stream"))
575       rtph264pay->stream_format = GST_H264_STREAM_FORMAT_BYTESTREAM;
576   }
577 
578   if (!gst_structure_get_fraction (str, "framerate", &rtph264pay->fps_num,
579           &rtph264pay->fps_denum))
580     rtph264pay->fps_num = rtph264pay->fps_denum = 0;
581 
582   /* packetized AVC video has a codec_data */
583   if ((value = gst_structure_get_value (str, "codec_data"))) {
584     guint num_sps, num_pps;
585     gint i, nal_size;
586 
587     GST_DEBUG_OBJECT (rtph264pay, "have packetized h264");
588 
589     buffer = gst_value_get_buffer (value);
590 
591     gst_buffer_map (buffer, &map, GST_MAP_READ);
592     data = map.data;
593     size = map.size;
594 
595     /* parse the avcC data */
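    /* avcC (AVCDecoderConfigurationRecord, ISO/IEC 14496-15) layout:
     *   [0] configurationVersion (= 1)
     *   [1] AVCProfileIndication  [2] profile_compatibility  [3] AVCLevelIndication
     *   [4] 6 bits reserved | 2 bits lengthSizeMinusOne
     *   [5] 3 bits reserved | 5 bits numOfSequenceParameterSets
     *   then length-prefixed SPS NALs, a PPS count byte and
     *   length-prefixed PPS NALs */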
596     if (size < 7)
597       goto avcc_too_small;
598     /* parse the version, this must be 1 */
599     if (data[0] != 1)
600       goto wrong_version;
601 
602     /* AVCProfileIndication */
603     /* profile_compat */
604     /* AVCLevelIndication */
605     rtph264pay->profile_level = (data[1] << 16) | (data[2] << 8) | data[3];
606     GST_DEBUG_OBJECT (rtph264pay, "profile %06x", rtph264pay->profile_level);
607 
608     /* 6 bits reserved | 2 bits lengthSizeMinusOne */
609     /* this is the number of bytes in front of the NAL units to mark their
610      * length */
611     rtph264pay->nal_length_size = (data[4] & 0x03) + 1;
612     GST_DEBUG_OBJECT (rtph264pay, "nal length %u", rtph264pay->nal_length_size);
613     /* 3 bits reserved | 5 bits numOfSequenceParameterSets */
614     num_sps = data[5] & 0x1f;
615     GST_DEBUG_OBJECT (rtph264pay, "num SPS %u", num_sps);
616 
617     data += 6;
618     size -= 6;
619 
620     /* create the sprop-parameter-sets */
621     for (i = 0; i < num_sps; i++) {
622       GstBuffer *sps_buf;
623 
624       if (size < 2)
625         goto avcc_error;
626 
627       nal_size = (data[0] << 8) | data[1];
628       data += 2;
629       size -= 2;
630 
631       GST_LOG_OBJECT (rtph264pay, "SPS %d size %d", i, nal_size);
632 
633       if (size < nal_size)
634         goto avcc_error;
635 
636       /* make a buffer out of it and add to SPS list */
637       sps_buf = gst_buffer_new_and_alloc (nal_size);
638       gst_buffer_fill (sps_buf, 0, data, nal_size);
639       gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
640           rtph264pay->pps, sps_buf);
641       data += nal_size;
642       size -= nal_size;
643     }
644     if (size < 1)
645       goto avcc_error;
646 
647     /* 8 bits numOfPictureParameterSets */
648     num_pps = data[0];
649     data += 1;
650     size -= 1;
651 
652     GST_DEBUG_OBJECT (rtph264pay, "num PPS %u", num_pps);
653     for (i = 0; i < num_pps; i++) {
654       GstBuffer *pps_buf;
655 
656       if (size < 2)
657         goto avcc_error;
658 
659       nal_size = (data[0] << 8) | data[1];
660       data += 2;
661       size -= 2;
662 
663       GST_LOG_OBJECT (rtph264pay, "PPS %d size %d", i, nal_size);
664 
665       if (size < nal_size)
666         goto avcc_error;
667 
668       /* make a buffer out of it and add to PPS list */
669       pps_buf = gst_buffer_new_and_alloc (nal_size);
670       gst_buffer_fill (pps_buf, 0, data, nal_size);
671       gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
672           rtph264pay->pps, pps_buf);
673 
674       data += nal_size;
675       size -= nal_size;
676     }
677 
678     /* and update the caps with the collected data */
679     if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
680       goto set_sps_pps_failed;
681 
682     gst_buffer_unmap (buffer, &map);
683   } else {
684     GST_DEBUG_OBJECT (rtph264pay, "have bytestream h264");
685   }
686 
687   return TRUE;
688 
689 avcc_too_small:
690   {
691     GST_ERROR_OBJECT (rtph264pay, "avcC size %" G_GSIZE_FORMAT " < 7", size);
692     goto error;
693   }
694 wrong_version:
695   {
696     GST_ERROR_OBJECT (rtph264pay, "wrong avcC version");
697     goto error;
698   }
699 avcc_error:
700   {
701     GST_ERROR_OBJECT (rtph264pay, "avcC too small");
702     goto error;
703   }
704 set_sps_pps_failed:
705   {
706     GST_ERROR_OBJECT (rtph264pay, "failed to set sps/pps");
707     goto error;
708   }
709 error:
710   {
711     gst_buffer_unmap (buffer, &map);
712     return FALSE;
713   }
714 }
715 
716 static void
717 gst_rtp_h264_pay_parse_sprop_parameter_sets (GstRtpH264Pay * rtph264pay)
718 {
719   const gchar *ps;
720   gchar **params;
721   guint len;
722   gint i;
723   GstBuffer *buf;
724 
725   ps = rtph264pay->sprop_parameter_sets;
726   if (ps == NULL)
727     return;
728 
729   gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
730 
731   params = g_strsplit (ps, ",", 0);
732   len = g_strv_length (params);
733 
734   GST_DEBUG_OBJECT (rtph264pay, "we have %d params", len);
735 
736   for (i = 0; params[i]; i++) {
737     gsize nal_len;
738     GstMapInfo map;
739     guint8 *nalp;
740     guint save = 0;
741     gint state = 0;
742 
743     nal_len = strlen (params[i]);
744     buf = gst_buffer_new_and_alloc (nal_len);
745 
746     gst_buffer_map (buf, &map, GST_MAP_WRITE);
747     nalp = map.data;
748     nal_len = g_base64_decode_step (params[i], nal_len, nalp, &state, &save);
749     gst_buffer_unmap (buf, &map);
750     gst_buffer_resize (buf, 0, nal_len);
751 
752     if (!nal_len) {
753       gst_buffer_unref (buf);
754       continue;
755     }
756 
757     gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
758         rtph264pay->pps, buf);
759   }
760   g_strfreev (params);
761 }
762 
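/* Scan for the next 00 00 01 start code prefix; returns the offset of its
 * first zero byte, or @size when no start code is found (e.g. 0 for
 * data = { 0x00, 0x00, 0x01, 0x67 }). */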
763 static guint
764 next_start_code (const guint8 * data, guint size)
765 {
766   /* Boyer-Moore string matching algorithm, in a degenerate
767    * sense because our search 'alphabet' is binary - 0 & 1 only.
768    * This allows us to simplify the general BM algorithm to a very
769    * simple form. */
770   /* assume 1 is in the 3rd byte */
771   guint offset = 2;
772 
773   while (offset < size) {
774     if (1 == data[offset]) {
775       unsigned int shift = offset;
776 
777       if (0 == data[--shift]) {
778         if (0 == data[--shift]) {
779           return shift;
780         }
781       }
782       /* The jump is always 3 because of the 1 previously matched.
783        * All the 0's must be after this '1' matched at offset */
784       offset += 3;
785     } else if (0 == data[offset]) {
786       /* maybe next byte is 1? */
787       offset++;
788     } else {
789       /* can jump 3 bytes forward */
790       offset += 3;
791     }
792     /* at each iteration, we rescan in a backward manner until
793      * we match 0.0.1 in reverse order. Since our search string
794      * has only 2 'alphabets' (i.e. 0 & 1), we know that any
795      * mismatch will force us to shift a fixed number of steps */
796   }
797   GST_DEBUG ("Cannot find next NAL start code. returning %u", size);
798 
799   return size;
800 }
801 
802 static gboolean
803 gst_rtp_h264_pay_decode_nal (GstRtpH264Pay * payloader,
804     const guint8 * data, guint size, GstClockTime dts, GstClockTime pts)
805 {
806   guint8 header, type;
807   gboolean updated;
808 
809   /* default is no update */
810   updated = FALSE;
811 
812   GST_DEBUG ("NAL payload len=%u", size);
813 
814   header = data[0];
815   type = header & 0x1f;
816 
817   /* We record the timestamp of the last SPS/PPS so
818    * that we can insert them at regular intervals and when needed. */
819   if (SPS_TYPE_ID == type || PPS_TYPE_ID == type) {
820     GstBuffer *nal;
821 
822     /* trailing 0x0 are not part of the SPS/PPS */
823     while (size > 0 && data[size - 1] == 0x0)
824       size--;
825 
826     /* encode the entire SPS NAL in base64 */
827     GST_DEBUG ("Found %s %x %x %x Len=%u", type == SPS_TYPE_ID ? "SPS" : "PPS",
828         (header >> 7), (header >> 5) & 3, type, size);
829 
830     nal = gst_buffer_new_allocate (NULL, size, NULL);
831     gst_buffer_fill (nal, 0, data, size);
832 
833     updated = gst_rtp_h264_add_sps_pps (GST_ELEMENT (payloader),
834         payloader->sps, payloader->pps, nal);
835 
836     /* remember when we last saw SPS */
837     if (pts != -1)
838       payloader->last_spspps =
839           gst_segment_to_running_time (&GST_RTP_BASE_PAYLOAD_CAST
840           (payloader)->segment, GST_FORMAT_TIME, pts);
841   } else {
842     GST_DEBUG ("NAL: %x %x %x Len = %u", (header >> 7),
843         (header >> 5) & 3, type, size);
844   }
845 
846   return updated;
847 }
848 
849 static GstFlowReturn
850 gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
851     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
852     gboolean delta_unit, gboolean discont);
853 
854 static GstFlowReturn
855 gst_rtp_h264_pay_payload_nal_single (GstRTPBasePayload * basepayload,
856     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
857     gboolean delta_unit, gboolean discont);
858 
859 static GstFlowReturn
860 gst_rtp_h264_pay_payload_nal_fragment (GstRTPBasePayload * basepayload,
861     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
862     gboolean delta_unit, gboolean discont, guint8 nal_header);
863 
864 static GstFlowReturn
865 gst_rtp_h264_pay_payload_nal_bundle (GstRTPBasePayload * basepayload,
866     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
867     gboolean delta_unit, gboolean discont, guint8 nal_header);
868 
869 static GstFlowReturn
870 gst_rtp_h264_pay_send_sps_pps (GstRTPBasePayload * basepayload,
871     GstClockTime dts, GstClockTime pts, gboolean delta_unit, gboolean discont)
872 {
873   GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (basepayload);
874   GstFlowReturn ret = GST_FLOW_OK;
875   gboolean sent_all_sps_pps = TRUE;
876   guint i;
877 
878   for (i = 0; i < rtph264pay->sps->len; i++) {
879     GstBuffer *sps_buf =
880         GST_BUFFER_CAST (g_ptr_array_index (rtph264pay->sps, i));
881 
882     GST_DEBUG_OBJECT (rtph264pay, "inserting SPS in the stream");
883     /* resend SPS */
884     ret = gst_rtp_h264_pay_payload_nal (basepayload, gst_buffer_ref (sps_buf),
885         dts, pts, FALSE, delta_unit, discont);
886     /* Not critical here; but throw a warning */
887     if (ret != GST_FLOW_OK) {
888       sent_all_sps_pps = FALSE;
889       GST_WARNING_OBJECT (basepayload, "Problem pushing SPS");
890     }
891   }
892   for (i = 0; i < rtph264pay->pps->len; i++) {
893     GstBuffer *pps_buf =
894         GST_BUFFER_CAST (g_ptr_array_index (rtph264pay->pps, i));
895 
896     GST_DEBUG_OBJECT (rtph264pay, "inserting PPS in the stream");
897     /* resend PPS */
898     ret = gst_rtp_h264_pay_payload_nal (basepayload, gst_buffer_ref (pps_buf),
899         dts, pts, FALSE, TRUE, FALSE);
900     /* Not critical here; but throw a warning */
901     if (ret != GST_FLOW_OK) {
902       sent_all_sps_pps = FALSE;
903       GST_WARNING_OBJECT (basepayload, "Problem pushing PPS");
904     }
905   }
906 
907   if (pts != -1 && sent_all_sps_pps)
908     rtph264pay->last_spspps =
909         gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
910         pts);
911 
912   return ret;
913 }
914 
915 /* @delta_unit: if %FALSE the first packet sent won't have the
916  * GST_BUFFER_FLAG_DELTA_UNIT flag.
917  * @discont: if %TRUE the first packet sent will have the
918  * GST_BUFFER_FLAG_DISCONT flag.
919  */
920 static GstFlowReturn
921 gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
922     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
923     gboolean delta_unit, gboolean discont)
924 {
925   GstRtpH264Pay *rtph264pay;
926   guint8 nal_header, nal_type;
927   gboolean send_spspps;
928   guint size;
929 
930   rtph264pay = GST_RTP_H264_PAY (basepayload);
931   size = gst_buffer_get_size (paybuf);
932 
933   gst_buffer_extract (paybuf, 0, &nal_header, 1);
934   nal_type = nal_header & 0x1f;
935 
936   /* These NAL types are reserved for STAP-A, STAP-B, MTAP16, and MTAP24
937    * aggregation packets and are only used internally by the payloader */
938   switch (nal_type) {
939     case 24:
940     case 25:
941     case 26:
942     case 27:
943       GST_WARNING_OBJECT (rtph264pay, "Ignoring reserved NAL TYPE=%d",
944           nal_type);
945       gst_buffer_unref (paybuf);
946       return GST_FLOW_OK;
947     default:
948       break;
949   }
950 
951   GST_DEBUG_OBJECT (rtph264pay,
952       "payloading NAL Unit: datasize=%u type=%d pts=%" GST_TIME_FORMAT,
953       size, nal_type, GST_TIME_ARGS (pts));
954 
955   /* should set src caps before pushing stuff,
956    * and if we did not see enough SPS/PPS, that may not be the case */
957   if (G_UNLIKELY (!gst_pad_has_current_caps (GST_RTP_BASE_PAYLOAD_SRCPAD
958               (basepayload))))
959     gst_rtp_h264_pay_set_sps_pps (basepayload);
960 
961   send_spspps = FALSE;
962 
963   /* check if we need to emit an SPS/PPS now */
964   if (nal_type == IDR_TYPE_ID && rtph264pay->spspps_interval > 0) {
965     if (rtph264pay->last_spspps != -1) {
966       guint64 diff;
967       GstClockTime running_time =
968           gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
969           pts);
970 
971       GST_LOG_OBJECT (rtph264pay,
972           "now %" GST_TIME_FORMAT ", last SPS/PPS %" GST_TIME_FORMAT,
973           GST_TIME_ARGS (running_time),
974           GST_TIME_ARGS (rtph264pay->last_spspps));
975 
976       /* calculate the time elapsed since the last SPS/PPS was sent */
977       if (running_time > rtph264pay->last_spspps)
978         diff = running_time - rtph264pay->last_spspps;
979       else
980         diff = 0;
981 
982       GST_DEBUG_OBJECT (rtph264pay,
983           "interval since last SPS/PPS %" GST_TIME_FORMAT,
984           GST_TIME_ARGS (diff));
985 
986       /* bigger than interval, queue SPS/PPS */
987       if (GST_TIME_AS_SECONDS (diff) >= rtph264pay->spspps_interval) {
988         GST_DEBUG_OBJECT (rtph264pay, "time to send SPS/PPS");
989         send_spspps = TRUE;
990       }
991     } else {
992       /* no known previous SPS/PPS time, send now */
993       GST_DEBUG_OBJECT (rtph264pay, "no previous SPS/PPS time, send now");
994       send_spspps = TRUE;
995     }
996   } else if (nal_type == IDR_TYPE_ID && rtph264pay->spspps_interval == -1) {
997     GST_DEBUG_OBJECT (rtph264pay, "sending SPS/PPS before current IDR frame");
998     /* send SPS/PPS before every IDR frame */
999     send_spspps = TRUE;
1000   }
1001 
1002   if (send_spspps || rtph264pay->send_spspps) {
1003     /* we need to send SPS/PPS now first. FIXME, don't use the pts for
1004      * checking when we need to send SPS/PPS but convert to running_time first. */
1005     GstFlowReturn ret;
1006 
1007     rtph264pay->send_spspps = FALSE;
1008 
1009     ret = gst_rtp_h264_pay_send_sps_pps (basepayload, dts, pts, delta_unit,
1010         discont);
1011     if (ret != GST_FLOW_OK) {
1012       gst_buffer_unref (paybuf);
1013       return ret;
1014     }
1015 
1016     delta_unit = TRUE;
1017     discont = FALSE;
1018   }
1019 
1020   if (rtph264pay->aggregate_mode != GST_RTP_H264_AGGREGATE_NONE)
1021     return gst_rtp_h264_pay_payload_nal_bundle (basepayload, paybuf, dts, pts,
1022         end_of_au, delta_unit, discont, nal_header);
1023 
1024   return gst_rtp_h264_pay_payload_nal_fragment (basepayload, paybuf, dts, pts,
1025       end_of_au, delta_unit, discont, nal_header);
1026 }
1027 
1028 static GstFlowReturn
1029 gst_rtp_h264_pay_payload_nal_fragment (GstRTPBasePayload * basepayload,
1030     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
1031     gboolean delta_unit, gboolean discont, guint8 nal_header)
1032 {
1033   GstRtpH264Pay *rtph264pay;
1034   guint mtu, size, max_fragment_size, max_fragments, ii, pos;
1035   GstBuffer *outbuf;
1036   guint8 *payload;
1037   GstBufferList *list = NULL;
1038   GstRTPBuffer rtp = { NULL };
1039 
1040   rtph264pay = GST_RTP_H264_PAY (basepayload);
1041   mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph264pay);
1042   size = gst_buffer_get_size (paybuf);
1043 
1044   if (gst_rtp_buffer_calc_packet_len (size, 0, 0) <= mtu) {
1045     /* We don't need to fragment this packet */
1046     GST_DEBUG_OBJECT (rtph264pay,
1047         "sending NAL Unit: datasize=%u mtu=%u", size, mtu);
1048     return gst_rtp_h264_pay_payload_nal_single (basepayload, paybuf, dts, pts,
1049         end_of_au, delta_unit, discont);
1050   }
1051 
1052   GST_DEBUG_OBJECT (basepayload,
1053       "using FU-A fragmentation for NAL Unit: datasize=%u mtu=%u", size, mtu);
1054 
1055   /* We keep 2 bytes for FU indicator and FU Header */
1056   max_fragment_size = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);
1057   max_fragments = (size + max_fragment_size - 2) / max_fragment_size;
1058   list = gst_buffer_list_new_sized (max_fragments);
1059 
1060   /* Start at the NALU payload */
1061   for (pos = 1, ii = 0; pos < size; pos += max_fragment_size, ii++) {
1062     guint remaining, fragment_size;
1063     gboolean first_fragment, last_fragment;
1064 
1065     remaining = size - pos;
1066     fragment_size = MIN (remaining, max_fragment_size);
1067     first_fragment = (pos == 1);
1068     last_fragment = (remaining <= max_fragment_size);
1069 
1070     GST_DEBUG_OBJECT (basepayload,
1071         "creating FU-A packet %u/%u, size %u",
1072         ii + 1, max_fragments, fragment_size);
1073 
1074     /* use buffer lists
1075      * create buffer without payload containing only the RTP header
1076      * (memory block at index 0) */
1077     outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 2, 0, 0);
1078 
1079     gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
1080 
1081     GST_BUFFER_DTS (outbuf) = dts;
1082     GST_BUFFER_PTS (outbuf) = pts;
1083     payload = gst_rtp_buffer_get_payload (&rtp);
1084 
1085     /* If it's the last fragment and the end of this au, mark the end of
1086      * slice */
1087     gst_rtp_buffer_set_marker (&rtp, last_fragment && end_of_au);
1088 
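    /* RFC 6184 5.8: the FU indicator reuses the F and NRI bits of the
     * original NAL header with type FU-A (28); the FU header carries the
     * S(tart)/E(nd) bits and the original 5-bit NAL type. */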
1089     /* FU indicator */
1090     payload[0] = (nal_header & 0x60) | FU_A_TYPE_ID;
1091 
1092     /* FU Header */
1093     payload[1] = (first_fragment << 7) | (last_fragment << 6) |
1094         (nal_header & 0x1f);
1095 
1096     gst_rtp_buffer_unmap (&rtp);
1097 
1098     /* insert payload memory block */
1099     gst_rtp_copy_video_meta (rtph264pay, outbuf, paybuf);
1100     gst_buffer_copy_into (outbuf, paybuf, GST_BUFFER_COPY_MEMORY, pos,
1101         fragment_size);
1102 
1103     if (!delta_unit)
1104       /* Only the first packet sent should not have the flag */
1105       delta_unit = TRUE;
1106     else
1107       GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
1108 
1109     if (discont) {
1110       GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
1111       /* Only the first packet sent should have the flag */
1112       discont = FALSE;
1113     }
1114 
1115     /* add the buffer to the buffer list */
1116     gst_buffer_list_add (list, outbuf);
1117   }
1118 
1119   GST_DEBUG_OBJECT (rtph264pay,
1120       "sending FU-A fragments: n=%u datasize=%u mtu=%u", ii, size, mtu);
1121 
1122   gst_buffer_unref (paybuf);
1123   return gst_rtp_base_payload_push_list (basepayload, list);
1124 }
1125 
1126 static GstFlowReturn
1127 gst_rtp_h264_pay_payload_nal_single (GstRTPBasePayload * basepayload,
1128     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
1129     gboolean delta_unit, gboolean discont)
1130 {
1131   GstRtpH264Pay *rtph264pay;
1132   GstBuffer *outbuf;
1133   GstRTPBuffer rtp = { NULL };
1134 
1135   rtph264pay = GST_RTP_H264_PAY (basepayload);
1136 
1137   /* create buffer without payload containing only the RTP header
1138    * (memory block at index 0) */
1139   outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
1140 
1141   gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
1142 
1143   /* Mark the end of a frame */
1144   gst_rtp_buffer_set_marker (&rtp, end_of_au);
1145 
1146   /* timestamp the outbuffer */
1147   GST_BUFFER_PTS (outbuf) = pts;
1148   GST_BUFFER_DTS (outbuf) = dts;
1149 
1150   if (delta_unit)
1151     GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
1152 
1153   if (discont)
1154     GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
1155 
1156   gst_rtp_buffer_unmap (&rtp);
1157 
1158   /* insert payload memory block */
1159   gst_rtp_copy_video_meta (rtph264pay, outbuf, paybuf);
1160   outbuf = gst_buffer_append (outbuf, paybuf);
1161 
1162   /* push the buffer to the next element */
1163   return gst_rtp_base_payload_push (basepayload, outbuf);
1164 }
1165 
1166 static void
1167 gst_rtp_h264_pay_reset_bundle (GstRtpH264Pay * rtph264pay)
1168 {
1169   g_clear_pointer (&rtph264pay->bundle, gst_buffer_list_unref);
1170   rtph264pay->bundle_size = 0;
1171   rtph264pay->bundle_contains_vcl = FALSE;
1172 }
1173 
1174 static GstFlowReturn
1175 gst_rtp_h264_pay_send_bundle (GstRtpH264Pay * rtph264pay, gboolean end_of_au)
1176 {
1177   GstRTPBasePayload *basepayload;
1178   GstBufferList *bundle;
1179   guint length, bundle_size;
1180   GstBuffer *first, *outbuf;
1181   GstClockTime dts, pts;
1182   gboolean delta, discont;
1183 
1184   bundle_size = rtph264pay->bundle_size;
1185 
1186   if (bundle_size == 0) {
1187     GST_DEBUG_OBJECT (rtph264pay, "no bundle, nothing to send");
1188     return GST_FLOW_OK;
1189   }
1190 
1191   basepayload = GST_RTP_BASE_PAYLOAD (rtph264pay);
1192   bundle = rtph264pay->bundle;
1193   length = gst_buffer_list_length (bundle);
1194 
1195   first = gst_buffer_list_get (bundle, 0);
1196   dts = GST_BUFFER_DTS (first);
1197   pts = GST_BUFFER_PTS (first);
1198   delta = GST_BUFFER_FLAG_IS_SET (first, GST_BUFFER_FLAG_DELTA_UNIT);
1199   discont = GST_BUFFER_FLAG_IS_SET (first, GST_BUFFER_FLAG_DISCONT);
1200 
1201   if (length == 1) {
1202     /* Push unaggregated NALU */
1203     outbuf = gst_buffer_ref (first);
1204 
1205     GST_DEBUG_OBJECT (rtph264pay,
1206         "sending NAL Unit unaggregated: datasize=%u", bundle_size - 2);
1207   } else {
1208     guint8 stap_header;
1209     guint i;
1210 
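    /* RFC 6184 5.7.1: a STAP-A payload is one NAL header byte (type 24)
     * followed, for every aggregated NAL unit, by a 16-bit big-endian
     * size and the NAL unit itself. */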
1211     outbuf = gst_buffer_new_allocate (NULL, sizeof stap_header, NULL);
1212     stap_header = STAP_A_TYPE_ID;
1213 
1214     for (i = 0; i < length; i++) {
1215       GstBuffer *buf = gst_buffer_list_get (bundle, i);
1216       guint8 nal_header;
1217       GstMemory *size_header;
1218       GstMapInfo map;
1219 
1220       gst_buffer_extract (buf, 0, &nal_header, sizeof nal_header);
1221 
1222       /* Propagate F bit */
1223       if ((nal_header & 0x80))
1224         stap_header |= 0x80;
1225 
1226       /* Select highest nal_ref_idc */
1227       if ((nal_header & 0x60) > (stap_header & 0x60))
1228         stap_header = (stap_header & 0x9f) | (nal_header & 0x60);
1229 
1230       /* append NALU size */
1231       size_header = gst_allocator_alloc (NULL, 2, NULL);
1232       gst_memory_map (size_header, &map, GST_MAP_WRITE);
1233       GST_WRITE_UINT16_BE (map.data, gst_buffer_get_size (buf));
1234       gst_memory_unmap (size_header, &map);
1235       gst_buffer_append_memory (outbuf, size_header);
1236 
1237       /* append NALU data */
1238       outbuf = gst_buffer_append (outbuf, gst_buffer_ref (buf));
1239     }
1240 
1241     gst_buffer_fill (outbuf, 0, &stap_header, sizeof stap_header);
1242 
1243     GST_DEBUG_OBJECT (rtph264pay,
1244         "sending STAP-A bundle: n=%u header=%02x datasize=%u",
1245         length, stap_header, bundle_size);
1246   }
1247 
1248   gst_rtp_h264_pay_reset_bundle (rtph264pay);
1249   return gst_rtp_h264_pay_payload_nal_single (basepayload, outbuf, dts, pts,
1250       end_of_au, delta, discont);
1251 }
1252 
1253 static GstFlowReturn
1254 gst_rtp_h264_pay_payload_nal_bundle (GstRTPBasePayload * basepayload,
1255     GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
1256     gboolean delta_unit, gboolean discont, guint8 nal_header)
1257 {
1258   GstRtpH264Pay *rtph264pay;
1259   GstFlowReturn ret;
1260   guint mtu, pay_size, bundle_size;
1261   GstBufferList *bundle;
1262   guint8 nal_type;
1263   gboolean start_of_au;
1264 
1265   rtph264pay = GST_RTP_H264_PAY (basepayload);
1266   nal_type = nal_header & 0x1f;
1267   mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph264pay);
1268   pay_size = 2 + gst_buffer_get_size (paybuf);
1269   bundle = rtph264pay->bundle;
1270   start_of_au = FALSE;
1271 
1272   if (bundle) {
1273     GstBuffer *first = gst_buffer_list_get (bundle, 0);
1274 
1275     if (nal_type == AUD_TYPE_ID) {
1276       GST_DEBUG_OBJECT (rtph264pay, "found access unit delimiter");
1277       start_of_au = TRUE;
1278     } else if (discont) {
1279       GST_DEBUG_OBJECT (rtph264pay, "found discont");
1280       start_of_au = TRUE;
1281     } else if (GST_BUFFER_PTS (first) != pts || GST_BUFFER_DTS (first) != dts) {
1282       GST_DEBUG_OBJECT (rtph264pay, "found timestamp mismatch");
1283       start_of_au = TRUE;
1284     }
1285   }
1286 
1287   if (start_of_au) {
1288     GST_DEBUG_OBJECT (rtph264pay, "sending bundle before start of AU");
1289 
1290     ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
1291     if (ret != GST_FLOW_OK)
1292       goto out;
1293 
1294     bundle = NULL;
1295   }
1296 
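  /* one STAP-A header byte plus this NAL's 2-byte size field and data */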
1297   bundle_size = 1 + pay_size;
1298 
1299   if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
1300     GST_DEBUG_OBJECT (rtph264pay, "NAL Unit cannot fit in a bundle");
1301 
1302     ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
1303     if (ret != GST_FLOW_OK)
1304       goto out;
1305 
1306     return gst_rtp_h264_pay_payload_nal_fragment (basepayload, paybuf, dts, pts,
1307         end_of_au, delta_unit, discont, nal_header);
1308   }
1309 
1310   bundle_size = rtph264pay->bundle_size + pay_size;
1311 
1312   if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
1313     GST_DEBUG_OBJECT (rtph264pay,
1314         "bundle overflows, sending: bundlesize=%u datasize=2+%u mtu=%u",
1315         rtph264pay->bundle_size, pay_size - 2, mtu);
1316 
1317     ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
1318     if (ret != GST_FLOW_OK)
1319       goto out;
1320 
1321     bundle = NULL;
1322   }
1323 
1324   if (!bundle) {
1325     GST_DEBUG_OBJECT (rtph264pay, "creating new STAP-A aggregate");
1326     bundle = rtph264pay->bundle = gst_buffer_list_new ();
1327     bundle_size = rtph264pay->bundle_size = 1;
1328     rtph264pay->bundle_contains_vcl = FALSE;
1329   }
1330 
1331   GST_DEBUG_OBJECT (rtph264pay,
1332       "bundling NAL Unit: bundlesize=%u datasize=2+%u mtu=%u",
1333       rtph264pay->bundle_size, pay_size - 2, mtu);
1334 
1335   paybuf = gst_buffer_make_writable (paybuf);
1336   GST_BUFFER_PTS (paybuf) = pts;
1337   GST_BUFFER_DTS (paybuf) = dts;
1338 
1339   if (delta_unit)
1340     GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT);
1341   else
1342     GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT);
1343 
1344   if (discont)
1345     GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DISCONT);
1346   else
1347     GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DISCONT);
1348 
1349   gst_buffer_list_add (bundle, gst_buffer_ref (paybuf));
1350   rtph264pay->bundle_size += pay_size;
1351   ret = GST_FLOW_OK;
1352 
1353   if ((nal_type >= 1 && nal_type <= 5) || nal_type == 14 ||
1354       (nal_type >= 20 && nal_type <= 23))
1355     rtph264pay->bundle_contains_vcl = TRUE;
1356 
1357   if (end_of_au) {
1358     GST_DEBUG_OBJECT (rtph264pay, "sending bundle at end of AU");
1359     ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
1360   }
1361 
1362 out:
1363   gst_buffer_unref (paybuf);
1364   return ret;
1365 }
1366 
1367 static GstFlowReturn
1368 gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * basepayload,
1369     GstBuffer * buffer)
1370 {
1371   GstRtpH264Pay *rtph264pay;
1372   GstFlowReturn ret;
1373   gsize size;
1374   guint nal_len, i;
1375   const guint8 *data;
1376   GstClockTime dts, pts;
1377   GArray *nal_queue;
1378   gboolean avc;
1379   GstBuffer *paybuf = NULL;
1380   gsize skip;
1381   gboolean delayed_not_delta_unit = FALSE;
1382   gboolean delayed_discont = FALSE;
1383   gboolean marker = FALSE;
1384   gboolean draining = (buffer == NULL);
1385 
1386   rtph264pay = GST_RTP_H264_PAY (basepayload);
1387 
1388   /* the input buffer contains one or more NAL units */
1389 
1390   avc = rtph264pay->stream_format == GST_H264_STREAM_FORMAT_AVC;
1391 
1392   if (avc) {
1393     /* In AVC mode, there is no adapter, so nothing to drain */
1394     if (draining)
1395       return GST_FLOW_OK;
1396   } else {
1397     if (buffer) {
1398       if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
1399         if (gst_adapter_available (rtph264pay->adapter) == 0)
1400           rtph264pay->delta_unit = FALSE;
1401         else
1402           /* This buffer contains a key frame but the adapter isn't empty. So
1403            * we'll purge it first by sending a first packet and then the second
1404            * one won't have the DELTA_UNIT flag. */
1405           delayed_not_delta_unit = TRUE;
1406       }
1407 
1408       if (GST_BUFFER_IS_DISCONT (buffer)) {
1409         if (gst_adapter_available (rtph264pay->adapter) == 0)
1410           rtph264pay->discont = TRUE;
1411         else
1412           /* This buffer has the DISCONT flag but the adapter isn't empty. So
1413            * we'll purge it first by sending a first packet and then the second
1414            * one will have the DISCONT flag set. */
1415           delayed_discont = TRUE;
1416       }
1417 
1418       marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
1419       gst_adapter_push (rtph264pay->adapter, buffer);
1420       buffer = NULL;
1421     }
1422 
1423     /* We want to use the first TS used to construct the following NAL */
1424     dts = gst_adapter_prev_dts (rtph264pay->adapter, NULL);
1425     pts = gst_adapter_prev_pts (rtph264pay->adapter, NULL);
1426 
1427     size = gst_adapter_available (rtph264pay->adapter);
1428     /* Nothing to do here if the adapter is empty, e.g. on EOS */
1429     if (size == 0)
1430       return GST_FLOW_OK;
1431     data = gst_adapter_map (rtph264pay->adapter, size);
1432     GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", size);
1433   }
1434 
1435   ret = GST_FLOW_OK;
1436 
1437   /* now loop over all NAL units and put them in a packet */
1438   if (avc) {
1439     GstBufferMemoryMap memory;
1440     gsize remaining_buffer_size;
1441     guint nal_length_size;
1442     gsize offset = 0;
1443 
1444     gst_buffer_memory_map (buffer, &memory);
1445     remaining_buffer_size = gst_buffer_get_size (buffer);
1446 
1447     pts = GST_BUFFER_PTS (buffer);
1448     dts = GST_BUFFER_DTS (buffer);
1449     rtph264pay->delta_unit = GST_BUFFER_FLAG_IS_SET (buffer,
1450         GST_BUFFER_FLAG_DELTA_UNIT);
1451     rtph264pay->discont = GST_BUFFER_IS_DISCONT (buffer);
1452     marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
1453     GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes",
1454         remaining_buffer_size);
1455 
1456     nal_length_size = rtph264pay->nal_length_size;
1457 
1458     while (remaining_buffer_size > nal_length_size) {
1459       gint i;
1460       gboolean end_of_au = FALSE;
1461 
1462       nal_len = 0;
1463       for (i = 0; i < nal_length_size; i++) {
1464         nal_len = (nal_len << 8) + *memory.data;
1465         if (!gst_buffer_memory_advance_bytes (&memory, 1))
1466           break;
1467       }
1468 
1469       offset += nal_length_size;
1470       remaining_buffer_size -= nal_length_size;
1471 
1472       if (remaining_buffer_size >= nal_len) {
1473         GST_DEBUG_OBJECT (basepayload, "got NAL of size %u", nal_len);
1474       } else {
1475         nal_len = remaining_buffer_size;
1476         GST_DEBUG_OBJECT (basepayload, "got incomplete NAL of size %u",
1477             nal_len);
1478       }
1479 
1480       /* If we're at the end of the buffer, then we're at the end of the
1481        * access unit
1482        */
1483       if (remaining_buffer_size - nal_len <= nal_length_size) {
1484         if (rtph264pay->alignment == GST_H264_ALIGNMENT_AU || marker)
1485           end_of_au = TRUE;
1486       }
1487 
1488       paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset,
1489           nal_len);
1490       ret =
1491           gst_rtp_h264_pay_payload_nal (basepayload, paybuf, dts, pts,
1492           end_of_au, rtph264pay->delta_unit, rtph264pay->discont);
1493 
1494       if (!rtph264pay->delta_unit)
1495         /* Only the first outgoing packet doesn't have the DELTA_UNIT flag */
1496         rtph264pay->delta_unit = TRUE;
1497 
1498       if (rtph264pay->discont)
1499         /* Only the first outgoing packet has the DISCONT flag */
1500         rtph264pay->discont = FALSE;
1501 
1502       if (ret != GST_FLOW_OK)
1503         break;
1504 
1505       /* Skip current nal. If it is split over multiple GstMemory
1506        * advance_bytes () will switch to the correct GstMemory. The payloader
1507        * does not access those bytes directly but uses gst_buffer_copy_region ()
1508        * to create a sub-buffer referencing the nal instead */
1509       if (!gst_buffer_memory_advance_bytes (&memory, nal_len))
1510         break;
1511 
1512       offset += nal_len;
1513       remaining_buffer_size -= nal_len;
1514     }
1515 
1516     gst_buffer_memory_unmap (&memory);
1517     gst_buffer_unref (buffer);
1518   } else {
1519     guint next;
1520     gboolean update = FALSE;
1521 
1522     /* get offset of first start code */
1523     next = next_start_code (data, size);
1524 
1525     /* skip to start code, if no start code is found, next will be size and we
1526      * will not collect data. */
1527     data += next;
1528     size -= next;
1529     nal_queue = rtph264pay->queue;
1530     skip = next;
1531 
1532     /* array must be empty when we get here */
1533     g_assert (nal_queue->len == 0);
1534 
1535     GST_DEBUG_OBJECT (basepayload,
1536         "found first start at %u, bytes left %" G_GSIZE_FORMAT, next, size);
1537 
1538     /* first pass to locate NALs and parse SPS/PPS */
1539     while (size > 4) {
1540       /* skip start code */
1541       data += 3;
1542       size -= 3;
1543 
1544       /* use next_start_code() to scan buffer.
1545        * next_start_code() returns the offset in data,
1546        * starting from zero to the first byte of 0.0.0.1
1547        * If no start code is found, it returns the value of the
1548        * 'size' parameter.
1549        * data is unchanged by the call to next_start_code()
1550        */
1551       next = next_start_code (data, size);
1552 
1553       /* nal or au aligned input needs no delaying until next time */
1554       if (next == size && !draining &&
1555           rtph264pay->alignment == GST_H264_ALIGNMENT_UNKNOWN) {
1556         /* Didn't find the start of next NAL and it's not EOS,
1557          * handle it next time */
1558         break;
1559       }
1560 
1561       /* nal length is distance to next start code */
1562       nal_len = next;
1563 
1564       GST_DEBUG_OBJECT (basepayload, "found next start at %u of size %u", next,
1565           nal_len);
1566 
1567       if (rtph264pay->sprop_parameter_sets != NULL) {
1568         /* explicitly set profile and sprop, use those */
1569         if (rtph264pay->update_caps) {
1570           if (!gst_rtp_base_payload_set_outcaps (basepayload,
1571                   "sprop-parameter-sets", G_TYPE_STRING,
1572                   rtph264pay->sprop_parameter_sets, NULL))
1573             goto caps_rejected;
1574 
1575           /* parse SPS and PPS from provided parameter set (for insertion) */
1576           gst_rtp_h264_pay_parse_sprop_parameter_sets (rtph264pay);
1577 
1578           rtph264pay->update_caps = FALSE;
1579 
1580           GST_DEBUG ("outcaps update: sprop-parameter-sets=%s",
1581               rtph264pay->sprop_parameter_sets);
1582         }
1583       } else {
1584         /* We know our stream is a valid H264 NAL packet,
1585          * go parse it for SPS/PPS to enrich the caps */
1586         /* order matters: decode the nal first so the call runs even when update is already TRUE */
1587         update =
1588             gst_rtp_h264_pay_decode_nal (rtph264pay, data, nal_len, dts, pts)
1589             || update;
1590       }
1591       /* move to next NAL packet */
1592       data += nal_len;
1593       size -= nal_len;
1594 
1595       g_array_append_val (nal_queue, nal_len);
1596     }
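    /* Nothing has been pushed yet at this point: the first pass above only
     * recorded NAL lengths in nal_queue and inspected SPS/PPS.  Updating the
     * output caps now (see below) means downstream can negotiate the new
     * sprop-parameter-sets before the second pass payloads any data. */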
1597 
1598     /* if new SPS or PPS were found, update the output caps */
1599     if (G_UNLIKELY (update))
1600       if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
1601         goto caps_rejected;
1602 
1603     /* second pass to payload and push */
1604 
1605     if (nal_queue->len != 0)
1606       gst_adapter_flush (rtph264pay->adapter, skip);
1607 
1608     for (i = 0; i < nal_queue->len; i++) {
1609       guint size;
1610       gboolean end_of_au = FALSE;
1611 
1612       nal_len = g_array_index (nal_queue, guint, i);
1613       /* skip start code */
1614       gst_adapter_flush (rtph264pay->adapter, 3);
1615 
1616       /* Trim the end unless we're the last NAL in the stream.
1617        * In case we're not at the end of the buffer we know the next block
1618        * starts with 0x000001 so all the 0x00 bytes at the end of this one are
1619        * trailing 0x0 that can be discarded */
1620       size = nal_len;
1621       data = gst_adapter_map (rtph264pay->adapter, size);
1622       if (i + 1 != nal_queue->len || !draining)
1623         for (; size > 1 && data[size - 1] == 0x0; size--)
1624           /* skip */ ;
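      /* The trailing zeros being trimmed typically come from 4-byte
       * 0x00000001 start codes: next_start_code () matches the 3-byte
       * 0x000001 pattern, so the extra leading zero of the next start code
       * gets counted as the last byte of this NAL. */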
1625 
1626 
1627       /* If it's the last nal unit we have in non-bytestream mode, we can
1628        * assume it's the end of an access-unit
1629        *
1630        * FIXME: We need to wait until the next packet or EOS to
1631        * actually payload the NAL so we can know if the current NAL is
1632        * the last one of an access unit or not if we are in bytestream mode
1633        */
1634       if (i == nal_queue->len - 1) {
1635         if (rtph264pay->alignment == GST_H264_ALIGNMENT_AU ||
1636             marker || draining)
1637           end_of_au = TRUE;
1638       }
1639       paybuf = gst_adapter_take_buffer (rtph264pay->adapter, size);
1640       g_assert (paybuf);
1641 
1642       /* put the data in one or more RTP packets */
1643       ret =
1644           gst_rtp_h264_pay_payload_nal (basepayload, paybuf, dts, pts,
1645           end_of_au, rtph264pay->delta_unit, rtph264pay->discont);
1646 
1647       if (delayed_not_delta_unit) {
1648         rtph264pay->delta_unit = FALSE;
1649         delayed_not_delta_unit = FALSE;
1650       } else {
1651         /* Only the first outgoing packet doesn't have the DELTA_UNIT flag */
1652         rtph264pay->delta_unit = TRUE;
1653       }
1654 
1655       if (delayed_discont) {
1656         rtph264pay->discont = TRUE;
1657         delayed_discont = FALSE;
1658       } else {
1659         /* Only the first outgoing packet has the DISCONT flag */
1660         rtph264pay->discont = FALSE;
1661       }
1662 
1663       if (ret != GST_FLOW_OK) {
1664         break;
1665       }
1666 
1667       /* move to next NAL packet */
1668       /* Skips the trailing zeros */
1669       gst_adapter_flush (rtph264pay->adapter, nal_len - size);
1670     }
1671     g_array_set_size (nal_queue, 0);
1672   }
1673 
1674   if (ret == GST_FLOW_OK && rtph264pay->bundle_size > 0 &&
1675       rtph264pay->aggregate_mode == GST_RTP_H264_AGGREGATE_ZERO_LATENCY &&
1676       rtph264pay->bundle_contains_vcl) {
1677     GST_DEBUG_OBJECT (rtph264pay, "sending bundle at end of incoming packet");
1678     ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
1679   }
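  /* In zero-latency aggregation the pending bundle is flushed as soon as it
   * contains a VCL NAL unit.  Per RFC 6184 a STAP-A aggregate (NAL type 24)
   * bundles several NAL units into a single RTP payload, roughly:
   *
   *   | STAP-A NAL header | size #1 | NAL #1 | size #2 | NAL #2 | ...
   *
   * with each size field a 16-bit big-endian length. */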
1680 
1681 
1682 done:
1683   if (!avc) {
1684     gst_adapter_unmap (rtph264pay->adapter);
1685   }
1686 
1687   return ret;
1688 
1689 caps_rejected:
1690   {
1691     GST_WARNING_OBJECT (basepayload, "Could not set outcaps");
1692     g_array_set_size (nal_queue, 0);
1693     ret = GST_FLOW_NOT_NEGOTIATED;
1694     goto done;
1695   }
1696 }
1697 
1698 static gboolean
1699 gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
1700 {
1701   gboolean res;
1702   const GstStructure *s;
1703   GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (payload);
1704   GstFlowReturn ret = GST_FLOW_OK;
1705 
1706   switch (GST_EVENT_TYPE (event)) {
1707     case GST_EVENT_FLUSH_STOP:
1708       gst_adapter_clear (rtph264pay->adapter);
1709       gst_rtp_h264_pay_reset_bundle (rtph264pay);
1710       break;
1711     case GST_EVENT_CUSTOM_DOWNSTREAM:
1712       s = gst_event_get_structure (event);
1713       if (gst_structure_has_name (s, "GstForceKeyUnit")) {
1714         gboolean resend_codec_data;
1715 
1716         if (gst_structure_get_boolean (s, "all-headers",
1717                 &resend_codec_data) && resend_codec_data)
1718           rtph264pay->send_spspps = TRUE;
1719       }
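      /* Such an event is typically produced upstream with
       * gst_video_event_new_downstream_force_key_unit (); passing
       * all_headers = TRUE is what requests the SPS/PPS resend handled here,
       * e.g. (illustrative):
       *
       *   gst_video_event_new_downstream_force_key_unit (ts, stream_time,
       *       running_time, TRUE, count);
       */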
1720       break;
1721     case GST_EVENT_EOS:
1722     {
1723       /* call handle_buffer with NULL to flush last NAL from adapter
1724        * in byte-stream mode
1725        */
1726       gst_rtp_h264_pay_handle_buffer (payload, NULL);
1727       ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
1728       break;
1729     }
1730     case GST_EVENT_STREAM_START:
1731       GST_DEBUG_OBJECT (rtph264pay, "New stream detected => Clear SPS and PPS");
1732       gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
1733       ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
1734       break;
1735     default:
1736       break;
1737   }
1738 
1739   if (ret != GST_FLOW_OK)
1740     return FALSE;
1741 
1742   res = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
1743 
1744   return res;
1745 }
1746 
1747 static GstStateChangeReturn
1748 gst_rtp_h264_pay_change_state (GstElement * element, GstStateChange transition)
1749 {
1750   GstStateChangeReturn ret;
1751   GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (element);
1752 
1753   switch (transition) {
1754     case GST_STATE_CHANGE_READY_TO_PAUSED:
1755       rtph264pay->send_spspps = FALSE;
1756       gst_adapter_clear (rtph264pay->adapter);
1757       gst_rtp_h264_pay_reset_bundle (rtph264pay);
1758       break;
1759     default:
1760       break;
1761   }
1762 
1763   ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1764 
1765   switch (transition) {
1766     case GST_STATE_CHANGE_PAUSED_TO_READY:
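      /* Resetting last_spspps presumably marks "no SPS/PPS sent yet", so the
       * config-interval logic re-emits them once the element runs again. */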
1767       rtph264pay->last_spspps = -1;
1768       gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
1769       break;
1770     default:
1771       break;
1772   }
1773 
1774   return ret;
1775 }
1776 
1777 static void
1778 gst_rtp_h264_pay_set_property (GObject * object, guint prop_id,
1779     const GValue * value, GParamSpec * pspec)
1780 {
1781   GstRtpH264Pay *rtph264pay;
1782 
1783   rtph264pay = GST_RTP_H264_PAY (object);
1784 
1785   switch (prop_id) {
1786     case PROP_SPROP_PARAMETER_SETS:
1787       g_free (rtph264pay->sprop_parameter_sets);
1788       rtph264pay->sprop_parameter_sets = g_value_dup_string (value);
1789       rtph264pay->update_caps = TRUE;
1790       break;
1791     case PROP_CONFIG_INTERVAL:
1792       rtph264pay->spspps_interval = g_value_get_int (value);
1793       break;
1794     case PROP_AGGREGATE_MODE:
1795       rtph264pay->aggregate_mode = g_value_get_enum (value);
1796       break;
1797     default:
1798       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1799       break;
1800   }
1801 }
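/* Illustrative use of the properties handled above (the pipeline is only a
 * sketch; any encoder producing H264 would do):
 *
 *   gst-launch-1.0 videotestsrc ! x264enc ! rtph264pay config-interval=1 \
 *       aggregate-mode=zero-latency ! udpsink host=127.0.0.1 port=5004
 *
 * config-interval > 0 makes the payloader re-insert SPS/PPS at roughly that
 * interval in seconds. */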
1802 
1803 static void
1804 gst_rtp_h264_pay_get_property (GObject * object, guint prop_id,
1805     GValue * value, GParamSpec * pspec)
1806 {
1807   GstRtpH264Pay *rtph264pay;
1808 
1809   rtph264pay = GST_RTP_H264_PAY (object);
1810 
1811   switch (prop_id) {
1812     case PROP_SPROP_PARAMETER_SETS:
1813       g_value_set_string (value, rtph264pay->sprop_parameter_sets);
1814       break;
1815     case PROP_CONFIG_INTERVAL:
1816       g_value_set_int (value, rtph264pay->spspps_interval);
1817       break;
1818     case PROP_AGGREGATE_MODE:
1819       g_value_set_enum (value, rtph264pay->aggregate_mode);
1820       break;
1821     default:
1822       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1823       break;
1824   }
1825 }
1826