/* GStreamer
 * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
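
/**
 * SECTION:element-rtph263ppay
 * @title: rtph263ppay
 *
 * Payload-encode H.263/+/++ video into RTP packets according to RFC 4629.
 *
 * Illustrative example pipeline (assumes the avenc_h263p encoder from
 * gst-libav is available; any encoder producing video/x-h263, variant=itu
 * will do):
 *
 * |[
 * gst-launch-1.0 videotestsrc ! videoconvert ! avenc_h263p ! rtph263ppay ! udpsink port=5000
 * ]|
 */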

#ifdef HAVE_CONFIG_H
# include "config.h"
#endif

#include <string.h>
#include <stdlib.h>
#include <stdio.h>

#include <gst/rtp/gstrtpbuffer.h>
#include <gst/video/video.h>

#include "gstrtpelements.h"
#include "gstrtph263ppay.h"
#include "gstrtputils.h"

#define DEFAULT_FRAGMENTATION_MODE GST_FRAGMENTATION_MODE_NORMAL

enum
{
  PROP_0,
  PROP_FRAGMENTATION_MODE
};

#define GST_TYPE_FRAGMENTATION_MODE (gst_fragmentation_mode_get_type())
static GType
gst_fragmentation_mode_get_type (void)
{
  static GType fragmentation_mode_type = 0;
  static const GEnumValue fragmentation_mode[] = {
    {GST_FRAGMENTATION_MODE_NORMAL, "Normal", "normal"},
    {GST_FRAGMENTATION_MODE_SYNC, "Fragment at sync points", "sync"},
    {0, NULL, NULL},
  };

  if (!fragmentation_mode_type) {
    fragmentation_mode_type =
        g_enum_register_static ("GstFragmentationMode", fragmentation_mode);
  }
  return fragmentation_mode_type;
}


GST_DEBUG_CATEGORY_STATIC (rtph263ppay_debug);
#define GST_CAT_DEFAULT rtph263ppay_debug

static GstStaticPadTemplate gst_rtp_h263p_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h263, variant = (string) itu")
    );

/*
 * We also return these in getcaps() as required by the SDP caps
 *
 * width = (int) [16, 4096]
 * height = (int) [16, 4096]
 * "annex-f = (boolean) {true, false},"
 * "annex-i = (boolean) {true, false},"
 * "annex-j = (boolean) {true, false},"
 * "annex-l = (boolean) {true, false},"
 * "annex-t = (boolean) {true, false},"
 * "annex-v = (boolean) {true, false}")
 */


static GstStaticPadTemplate gst_rtp_h263p_pay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-1998\"; "
        "application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-2000\"")
    );

static void gst_rtp_h263p_pay_finalize (GObject * object);

static void gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static gboolean gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload,
    GstCaps * caps);
static GstCaps *gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload,
    GstPad * pad, GstCaps * filter);
static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload *
    payload, GstBuffer * buffer);

#define gst_rtp_h263p_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpH263PPay, gst_rtp_h263p_pay, GST_TYPE_RTP_BASE_PAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263ppay, "rtph263ppay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_PAY, rtp_element_init (plugin));

static void
gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstRTPBasePayloadClass *gstrtpbasepayload_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;

  gobject_class->finalize = gst_rtp_h263p_pay_finalize;
  gobject_class->set_property = gst_rtp_h263p_pay_set_property;
  gobject_class->get_property = gst_rtp_h263p_pay_get_property;

  gstrtpbasepayload_class->set_caps = gst_rtp_h263p_pay_setcaps;
  gstrtpbasepayload_class->get_caps = gst_rtp_h263p_pay_sink_getcaps;
  gstrtpbasepayload_class->handle_buffer = gst_rtp_h263p_pay_handle_buffer;

  g_object_class_install_property (G_OBJECT_CLASS (klass),
      PROP_FRAGMENTATION_MODE, g_param_spec_enum ("fragmentation-mode",
          "Fragmentation Mode",
          "Packet Fragmentation Mode", GST_TYPE_FRAGMENTATION_MODE,
          DEFAULT_FRAGMENTATION_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h263p_pay_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h263p_pay_sink_template);

  gst_element_class_set_static_metadata (gstelement_class, "RTP H263 payloader",
      "Codec/Payloader/Network/RTP",
      "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
      "Wim Taymans <wim.taymans@gmail.com>");

  GST_DEBUG_CATEGORY_INIT (rtph263ppay_debug, "rtph263ppay",
      0, "rtph263ppay (RFC 4629)");

  gst_type_mark_as_plugin_api (GST_TYPE_FRAGMENTATION_MODE, 0);
}

static void
gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay)
{
  rtph263ppay->adapter = gst_adapter_new ();

  rtph263ppay->fragmentation_mode = DEFAULT_FRAGMENTATION_MODE;
}

static void
gst_rtp_h263p_pay_finalize (GObject * object)
{
  GstRtpH263PPay *rtph263ppay;

  rtph263ppay = GST_RTP_H263P_PAY (object);

  g_object_unref (rtph263ppay->adapter);
  rtph263ppay->adapter = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

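/* Negotiate the output caps: use whatever encoding-name (H263-1998 or
 * H263-2000) the downstream peer offers, falling back to H263-1998. */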
static gboolean
gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
  gboolean res;
  GstCaps *peercaps;
  gchar *encoding_name = NULL;

  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
  if (peercaps) {
    GstCaps *tcaps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
    GstCaps *intersect = gst_caps_intersect (peercaps, tcaps);
    gst_caps_unref (tcaps);

    gst_caps_unref (peercaps);
    if (!gst_caps_is_empty (intersect)) {
      GstStructure *s = gst_caps_get_structure (intersect, 0);
      encoding_name = g_strdup (gst_structure_get_string (s, "encoding-name"));
    }
    gst_caps_unref (intersect);
  }

  if (!encoding_name)
    encoding_name = g_strdup ("H263-1998");

  gst_rtp_base_payload_set_options (payload, "video", TRUE,
      (gchar *) encoding_name, 90000);
  res = gst_rtp_base_payload_set_outcaps (payload, NULL);
  g_free (encoding_name);

  return res;
}

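/* Append a copy of in_s constrained to a maximum picture size of x by y
 * pixels and a maximum frame rate of 30000/(1001*mpi) fps, where mpi is the
 * Minimum Picture Interval (1-32) signalled in the SDP. */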
static GstCaps *
caps_append (GstCaps * caps, GstStructure * in_s, guint x, guint y, guint mpi)
{
  GstStructure *s;

  if (!in_s)
    return caps;

  if (mpi < 1 || mpi > 32)
    return caps;

  s = gst_structure_copy (in_s);

  gst_structure_set (s,
      "width", GST_TYPE_INT_RANGE, 1, x,
      "height", GST_TYPE_INT_RANGE, 1, y,
      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001 * mpi, NULL);

  caps = gst_caps_merge_structure (caps, s);

  return caps;
}


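/* Translate the RTP caps offered downstream (typically derived from an SDP)
 * into video/x-h263 caps describing what an upstream encoder may produce. */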
static GstCaps *
gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstRtpH263PPay *rtph263ppay;
  GstCaps *caps = NULL, *templ;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);

  /* if we're just outputting to udpsink or fakesink or so, we should also
   * accept any input compatible with our sink template caps */
  if (!peercaps || gst_caps_is_any (peercaps)) {
    if (peercaps)
      gst_caps_unref (peercaps);
    caps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
    goto done;
  }

  /* We basically need to differentiate two use-cases here: One where there's
   * a capsfilter after the payloader with caps created from an SDP; in this
   * case the filter caps are fixed and we want to signal to an encoder what
   * we want it to produce. The second case is simply payloader ! depayloader
   * where we are dealing with the depayloader's template caps. In this case
   * we should accept any input compatible with our sink template caps. */
  if (!gst_caps_is_fixed (peercaps)) {
    gst_caps_unref (peercaps);
    caps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
    goto done;
  }

  templ = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  intersect = gst_caps_intersect (peercaps, templ);
  gst_caps_unref (peercaps);
  gst_caps_unref (templ);

  if (gst_caps_is_empty (intersect))
    return intersect;

  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");

    if (!strcmp (encoding_name, "H263-2000")) {
      const gchar *profile_str = gst_structure_get_string (s, "profile");
      const gchar *level_str = gst_structure_get_string (s, "level");
      int profile = 0;
      int level = 0;

      if (profile_str && level_str) {
        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
            v = FALSE;
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            NULL);

        profile = atoi (profile_str);
        level = atoi (level_str);

        /* These profiles are defined in H.263 Annex X */
        switch (profile) {
          case 0:
            /* The Baseline Profile (Profile 0) */
            break;
          case 1:
            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
             * (Profile 1)
             * Baseline + Annexes I, J, L.4 and T
             */
            i = j = l = t = TRUE;
            break;
          case 2:
            /* Version 1 Backward-Compatibility Profile (Profile 2)
             * Baseline + Annex F
             */
            f = TRUE;
            break;
          case 3:
            /* Version 2 Interactive and Streaming Wireless Profile (Profile 3)
             * Baseline + Annexes I, J, T
             */
            i = j = t = TRUE;
            break;
          case 4:
            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
             * Baseline + Annexes I, J, T, V, W.6.3.8
             */
            /* Missing W.6.3.8 */
            i = j = t = v = TRUE;
            break;
          case 5:
            /* Conversational High Compression Profile (Profile 5)
             * Baseline + Annexes F, I, J, L.4, T, D, U
             */
            /* Missing D, U */
            f = i = j = l = t = TRUE;
            break;
          case 6:
            /* Conversational Internet Profile (Profile 6)
             * Baseline + Annexes F, I, J, L.4, T, D, U and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, K with arbitrary slice ordering */
            f = i = j = l = t = TRUE;
            break;
          case 7:
            /* Conversational Interlace Profile (Profile 7)
             * Baseline + Annexes F, I, J, L.4, T, D, U, W.6.3.11
             */
            /* Missing D, U, W.6.3.11 */
            f = i = j = l = t = TRUE;
            break;
          case 8:
            /* High Latency Profile (Profile 8)
             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, P.5, O.1.1 */
            f = i = j = l = t = TRUE;
            break;
        }


        if (f || i || j || t || l || v) {
          GValue list = { 0 };
          GValue vstr = { 0 };

          g_value_init (&list, GST_TYPE_LIST);
          g_value_init (&vstr, G_TYPE_STRING);

          g_value_set_static_string (&vstr, "h263");
          gst_value_list_append_value (&list, &vstr);
          g_value_set_static_string (&vstr, "h263p");
          gst_value_list_append_value (&list, &vstr);

          if (l || v) {
            g_value_set_static_string (&vstr, "h263pp");
            gst_value_list_append_value (&list, &vstr);
          }
          g_value_unset (&vstr);

          gst_structure_set_value (new_s, "h263version", &list);
          g_value_unset (&list);
        } else {
          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
        }


        if (!f)
          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!i)
          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!j)
          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!t)
          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!l)
          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!v)
          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);


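        /* Constrain the picture size and frame rate according to the
         * signalled level (see H.263 Annex X) */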
        if (level <= 10 || level == 45) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 20) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 40) {

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 50) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 60) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 70) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 576,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 480,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else {
          caps = gst_caps_merge_structure (caps, new_s);
        }

      } else {
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            "h263version", G_TYPE_STRING, "h263",
            NULL);

        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
            " for H263-2000, defaulting to baseline H263");

        caps = gst_caps_merge_structure (caps, new_s);
      }
    } else {
      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
      /* FIXME: ffmpeg supports Annex K too, how do we express it?
       * guint k;
       */
      const gchar *str;
      GstStructure *new_s = gst_structure_new ("video/x-h263",
          "variant", G_TYPE_STRING, "itu",
          NULL);
      gboolean added = FALSE;

      str = gst_structure_get_string (s, "f");
      if (str && !strcmp (str, "1"))
        f = TRUE;

      str = gst_structure_get_string (s, "i");
      if (str && !strcmp (str, "1"))
        i = TRUE;

      str = gst_structure_get_string (s, "j");
      if (str && !strcmp (str, "1"))
        j = TRUE;

      str = gst_structure_get_string (s, "t");
      if (str && !strcmp (str, "1"))
        t = TRUE;

      if (f || i || j || t) {
        GValue list = { 0 };
        GValue vstr = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&vstr, G_TYPE_STRING);

        g_value_set_static_string (&vstr, "h263");
        gst_value_list_append_value (&list, &vstr);
        g_value_set_static_string (&vstr, "h263p");
        gst_value_list_append_value (&list, &vstr);
        g_value_unset (&vstr);

        gst_structure_set_value (new_s, "h263version", &list);
        g_value_unset (&list);
      } else {
        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
      }

      if (!f)
        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!i)
        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!j)
        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!t)
        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);


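      /* The SDP may signal one MPI (Minimum Picture Interval) value per
       * supported picture size (sqcif, qcif, cif, 4cif, 16cif), plus an
       * optional custom size as "Xmax,Ymax,MPI"; RFC 4629 requires the
       * custom Xmax and Ymax to be multiples of 4. */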
      str = gst_structure_get_string (s, "custom");
      if (str) {
        unsigned int xmax, ymax, mpi;
        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
          if (xmax % 4 == 0 && ymax % 4 == 0 && mpi >= 1 && mpi <= 32) {
            caps = caps_append (caps, new_s, xmax, ymax, mpi);
            added = TRUE;
          } else {
            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
                " %u x %u at %u, ignoring", xmax, ymax, mpi);
          }
        } else {
          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
              " ignoring", str);
        }
      }

      str = gst_structure_get_string (s, "16cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 1408, 1152, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "4cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 704, 576, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 352, 288, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "qcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 176, 144, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "sqcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 128, 96, mpi);
        added = TRUE;
      }

      if (added)
        gst_structure_free (new_s);
      else
        caps = gst_caps_merge_structure (caps, new_s);
    }
  }

  gst_caps_unref (intersect);

done:

  if (filter) {
    GstCaps *tmp;

    GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
        GST_PTR_FORMAT, caps, filter);
    tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  return caps;
}


static void
gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstRtpH263PPay *rtph263ppay;

  rtph263ppay = GST_RTP_H263P_PAY (object);

  switch (prop_id) {
    case PROP_FRAGMENTATION_MODE:
      rtph263ppay->fragmentation_mode = g_value_get_enum (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstRtpH263PPay *rtph263ppay;

  rtph263ppay = GST_RTP_H263P_PAY (object);

  switch (prop_id) {
    case PROP_FRAGMENTATION_MODE:
      g_value_set_enum (value, rtph263ppay->fragmentation_mode);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBufferList *list = NULL;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;
  gboolean fragmented = FALSE;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With fragmentation mode GST_FRAGMENTATION_MODE_NORMAL:
   * This algorithm implements the Follow-on packets method for packetization.
   * It assumes a network with low packet loss.
   * With fragmentation mode GST_FRAGMENTATION_MODE_SYNC:
   * This algorithm splits large frames at synchronisation points (segments)
   * (see RFC 4629 section 6). It would be interesting to have a property such
   * as network quality to select between the two packetization methods. */
  /* TODO Add VRC support (see RFC 4629 section 5.2) */
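  /* For example, an application can request sync mode with
   * g_object_set (rtph263ppay, "fragmentation-mode",
   *     GST_FRAGMENTATION_MODE_SYNC, NULL) or, with gst-launch-1.0, via
   * "rtph263ppay fragmentation-mode=sync". */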

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *payload_buf;

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after the first possible GOB */

      /* Check if the data starts with a GOB, EOS or EOSSBS start code */
      /* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */
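      /* GOB (and picture) start codes begin with the 17 bits
       * '0000 0000 0000 0000 1', hence the scan for 0x00008000 under the
       * mask 0xffff8000 */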
      next_gop =
          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
          0x00008000, 0, avail);
      if (next_gop == 0) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }

      /* Find the next GOB and cut the packet accordingly */
      /* TODO we should get as many gobs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      if (next_gop == 0 && avail > 3)
        next_gop =
            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
            0x00008000, 3, avail - 3);
      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at : %d", next_gop);
      if (next_gop == -1)
        next_gop = 0;
    }

    /* for picture start frames (non-fragmented), we need to remove the first
     * two 0x00 bytes and set P=1 */
    if (!fragmented || found_gob) {
      gst_adapter_flush (rtph263ppay->adapter, 2);
      avail -= 2;
    }
    header_len = 2;

    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

    outbuf =
        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
        (rtph263ppay), header_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* P=1 for the first packet of a picture and for packets that start with a
     * GOB header, P=0 for follow-on fragments */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;

    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
    gst_rtp_copy_video_meta (rtph263ppay, outbuf, payload_buf);
    outbuf = gst_buffer_append (outbuf, payload_buf);
    avail -= towrite;

    /* If more data is available and this is our first iteration,
     * we create a buffer list and remember that we're fragmented.
     *
     * If we're fragmented already, add buffers to the previously
     * created buffer list.
     *
     * Otherwise fragmented will be FALSE and we just push the single output
     * buffer, and no list is allocated.
     */
    if (avail && !fragmented) {
      fragmented = TRUE;
      list = gst_buffer_list_new ();
      gst_buffer_list_add (list, outbuf);
    } else if (fragmented) {
      gst_buffer_list_add (list, outbuf);
    }
  }

  if (fragmented) {
    ret =
        gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtph263ppay),
        list);
  } else {
    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
  }

  return ret;
}

static GstFlowReturn
gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload * payload,
    GstBuffer * buffer)
{
  GstRtpH263PPay *rtph263ppay;
  GstFlowReturn ret;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  rtph263ppay->first_timestamp = GST_BUFFER_PTS (buffer);
  rtph263ppay->first_duration = GST_BUFFER_DURATION (buffer);

  /* we always encode and flush a full picture */
  gst_adapter_push (rtph263ppay->adapter, buffer);
  ret = gst_rtp_h263p_pay_flush (rtph263ppay);

  return ret;
}