1 /* GStreamer
2 *
3 * uvch264_mjpg_demux: a demuxer for muxed stream in UVC H264 compliant MJPG
4 *
5 * Copyright (C) 2012 Cisco Systems, Inc.
6 * Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
7 *
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2.1 of the License, or (at your option) any later version.
12 *
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
17 *
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, write to the
20 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
21 * Boston, MA 02110-1301, USA.
22 */
23
24 /**
25 * SECTION:element-uvch264mjpgdemux
26 * @title: uvch264mjpgdemux
27 * @short_description: UVC H264 compliant MJPG demuxer
28 *
29 * Parses a MJPG stream from a UVC H264 compliant encoding camera and extracts
30 * each muxed stream into separate pads.
31 *
32 */
33
34 #ifdef HAVE_CONFIG_H
35 #include <config.h>
36 #endif
37
38 #include <string.h>
39 #include <linux/uvcvideo.h>
40 #include <linux/usb/video.h>
41 #include <sys/ioctl.h>
42
#ifndef UVCIOC_GET_LAST_SCR
#include <time.h>

/* Fallback for kernel headers that do not yet expose UVCIOC_GET_LAST_SCR.
 * The struct layout and the ioctl number must match the uvcvideo driver's
 * definition exactly — do not reorder or retype fields. */
struct uvc_last_scr_sample
{
  __u32 dev_frequency;          /* device clock frequency (see debug output) */
  __u32 dev_stc;                /* device source time clock value */
  __u16 dev_sof;                /* device USB start-of-frame counter */
  struct timespec host_ts;      /* host timestamp taken when the SCR was read */
  __u16 host_sof;               /* host-side USB start-of-frame counter */
};

#define UVCIOC_GET_LAST_SCR _IOR('u', 0x23, struct uvc_last_scr_sample)
#endif
57
58 #include "gstuvch264_mjpgdemux.h"
59
/* GObject property IDs. */
enum
{
  PROP_0,                       /* reserved by GObject */
  PROP_DEVICE_FD,               /* "device-fd": v4l2 device file descriptor */
  PROP_NUM_CLOCK_SAMPLES        /* "num-clock-samples": clock samples for PTS sync */
};

/* Default value for the "num-clock-samples" property. */
#define DEFAULT_NUM_CLOCK_SAMPLES 32
68
/* Sink pad: the muxed MJPG stream coming from the UVC H264 camera. */
static GstStaticPadTemplate mjpgsink_pad_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/jpeg, "
        "width = (int) [ 0, MAX ],"
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );

/* Source pad for the plain JPEG images left after the APP4 payloads
 * have been stripped out. */
static GstStaticPadTemplate jpegsrc_pad_template =
    GST_STATIC_PAD_TEMPLATE ("jpeg",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/jpeg, "
        "width = (int) [ 0, MAX ],"
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );

/* Source pad for the H.264 auxiliary stream demuxed from APP4 markers. */
static GstStaticPadTemplate h264src_pad_template =
    GST_STATIC_PAD_TEMPLATE ("h264",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h264, "
        "width = (int) [ 0, MAX ], "
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );

/* Source pad for the raw YUY2 auxiliary stream. */
static GstStaticPadTemplate yuy2src_pad_template =
    GST_STATIC_PAD_TEMPLATE ("yuy2",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, "
        "format = (string) YUY2, "
        "width = (int) [ 0, MAX ], "
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );

/* Source pad for the raw NV12 auxiliary stream. */
static GstStaticPadTemplate nv12src_pad_template =
    GST_STATIC_PAD_TEMPLATE ("nv12",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, "
        "format = (string) NV12, "
        "width = (int) [ 0, MAX ], "
        "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
    );
114
115
116 GST_DEBUG_CATEGORY_STATIC (uvc_h264_mjpg_demux_debug);
117 #define GST_CAT_DEFAULT uvc_h264_mjpg_demux_debug
118
119 static void gst_uvc_h264_mjpg_demux_set_property (GObject * object,
120 guint prop_id, const GValue * value, GParamSpec * pspec);
121 static void gst_uvc_h264_mjpg_demux_get_property (GObject * object,
122 guint prop_id, GValue * value, GParamSpec * pspec);
123 static void gst_uvc_h264_mjpg_demux_dispose (GObject * object);
124 static GstFlowReturn gst_uvc_h264_mjpg_demux_chain (GstPad * pad,
125 GstObject * parent, GstBuffer * buffer);
126 static gboolean gst_uvc_h264_mjpg_demux_sink_event (GstPad * pad,
127 GstObject * parent, GstEvent * event);
128 static gboolean gst_uvc_h264_mjpg_demux_query (GstPad * pad,
129 GstObject * parent, GstQuery * query);
130
131 #define gst_uvc_h264_mjpg_demux_parent_class parent_class
132 G_DEFINE_TYPE (GstUvcH264MjpgDemux, gst_uvc_h264_mjpg_demux, GST_TYPE_ELEMENT);
133 GST_ELEMENT_REGISTER_DEFINE (uvch264mjpgdemux, "uvch264mjpgdemux",
134 GST_RANK_NONE, GST_TYPE_UVC_H264_MJPG_DEMUX);
135
136 static void
gst_uvc_h264_mjpg_demux_class_init(GstUvcH264MjpgDemuxClass * klass)137 gst_uvc_h264_mjpg_demux_class_init (GstUvcH264MjpgDemuxClass * klass)
138 {
139 GObjectClass *gobject_class = (GObjectClass *) klass;
140 GstElementClass *element_class = (GstElementClass *) klass;
141
142 parent_class = g_type_class_peek_parent (klass);
143
144 gobject_class->set_property = gst_uvc_h264_mjpg_demux_set_property;
145 gobject_class->get_property = gst_uvc_h264_mjpg_demux_get_property;
146 gobject_class->dispose = gst_uvc_h264_mjpg_demux_dispose;
147
148 gst_element_class_add_static_pad_template (element_class,
149 &mjpgsink_pad_template);
150 gst_element_class_add_static_pad_template (element_class,
151 &jpegsrc_pad_template);
152 gst_element_class_add_static_pad_template (element_class,
153 &h264src_pad_template);
154 gst_element_class_add_static_pad_template (element_class,
155 &yuy2src_pad_template);
156 gst_element_class_add_static_pad_template (element_class,
157 &nv12src_pad_template);
158
159 gst_element_class_set_static_metadata (element_class,
160 "UVC H264 MJPG Demuxer",
161 "Video/Demuxer",
162 "Demux UVC H264 auxiliary streams from MJPG images",
163 "Youness Alaoui <youness.alaoui@collabora.co.uk>");
164
165 g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
166 g_param_spec_int ("device-fd", "device-fd",
167 "File descriptor of the v4l2 device",
168 -1, G_MAXINT, -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
169
170 g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES,
171 g_param_spec_int ("num-clock-samples", "num-clock-samples",
172 "Number of clock samples to gather for the PTS synchronization"
173 " (-1 = unlimited)",
174 0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES,
175 G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
176
177 GST_DEBUG_CATEGORY_INIT (uvc_h264_mjpg_demux_debug,
178 "uvch264mjpgdemux", 0, "UVC H264 MJPG Demuxer");
179 }
180
/* Instance init: create the always pads (sink, jpeg, h264, yuy2, nv12),
 * install the sink pad's chain/event/query handlers and pre-build the
 * caps skeletons that get width/height/framerate filled in by the chain
 * function when an auxiliary stream first appears. */
static void
gst_uvc_h264_mjpg_demux_init (GstUvcH264MjpgDemux * self)
{
  self->last_pts = GST_CLOCK_TIME_NONE;
  self->pts_reordered_warning = FALSE;
  self->device_fd = -1;         /* no v4l2 fd until "device-fd" is set */

  /* create the sink and src pads */
  self->sink_pad =
      gst_pad_new_from_static_template (&mjpgsink_pad_template, "sink");
  gst_pad_set_chain_function (self->sink_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_chain));
  gst_pad_set_event_function (self->sink_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_sink_event));
  gst_pad_set_query_function (self->sink_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_query));
  gst_element_add_pad (GST_ELEMENT (self), self->sink_pad);

  /* JPEG: shares the query handler so CAPS queries proxy through */
  self->jpeg_pad =
      gst_pad_new_from_static_template (&jpegsrc_pad_template, "jpeg");
  gst_pad_set_query_function (self->jpeg_pad,
      GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_query));
  gst_element_add_pad (GST_ELEMENT (self), self->jpeg_pad);

  /* H264 */
  self->h264_pad =
      gst_pad_new_from_static_template (&h264src_pad_template, "h264");
  gst_pad_use_fixed_caps (self->h264_pad);
  gst_element_add_pad (GST_ELEMENT (self), self->h264_pad);

  /* YUY2 */
  self->yuy2_pad =
      gst_pad_new_from_static_template (&yuy2src_pad_template, "yuy2");
  gst_pad_use_fixed_caps (self->yuy2_pad);
  gst_element_add_pad (GST_ELEMENT (self), self->yuy2_pad);

  /* NV12 */
  self->nv12_pad =
      gst_pad_new_from_static_template (&nv12src_pad_template, "nv12");
  gst_pad_use_fixed_caps (self->nv12_pad);
  gst_element_add_pad (GST_ELEMENT (self), self->nv12_pad);

  /* Base caps per aux format; zero width/height forces the chain function
   * to (re)negotiate caps on the first auxiliary frame */
  self->h264_caps = gst_caps_new_empty_simple ("video/x-h264");
  self->yuy2_caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "YUY2", NULL);
  self->nv12_caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "NV12", NULL);
  self->h264_width = self->h264_height = 0;
  self->yuy2_width = self->yuy2_height = 0;
  self->nv12_width = self->nv12_height = 0;
}
233
234 static void
gst_uvc_h264_mjpg_demux_dispose(GObject * object)235 gst_uvc_h264_mjpg_demux_dispose (GObject * object)
236 {
237 GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
238
239 if (self->h264_caps)
240 gst_caps_unref (self->h264_caps);
241 self->h264_caps = NULL;
242 if (self->yuy2_caps)
243 gst_caps_unref (self->yuy2_caps);
244 self->yuy2_caps = NULL;
245 if (self->nv12_caps)
246 gst_caps_unref (self->nv12_caps);
247 self->nv12_caps = NULL;
248 g_free (self->clock_samples);
249 self->clock_samples = NULL;
250
251 G_OBJECT_CLASS (parent_class)->dispose (object);
252 }
253
254 static void
gst_uvc_h264_mjpg_demux_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)255 gst_uvc_h264_mjpg_demux_set_property (GObject * object,
256 guint prop_id, const GValue * value, GParamSpec * pspec)
257 {
258 GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
259
260 switch (prop_id) {
261 case PROP_DEVICE_FD:
262 self->device_fd = g_value_get_int (value);
263 break;
264 case PROP_NUM_CLOCK_SAMPLES:
265 self->num_clock_samples = g_value_get_int (value);
266 if (self->clock_samples) {
267 if (self->num_clock_samples) {
268 self->clock_samples = g_realloc_n (self->clock_samples,
269 self->num_clock_samples, sizeof (GstUvcH264ClockSample));
270 if (self->num_samples > self->num_clock_samples) {
271 self->num_samples = self->num_clock_samples;
272 if (self->last_sample >= self->num_samples)
273 self->last_sample = self->num_samples - 1;
274 }
275 } else {
276 g_free (self->clock_samples);
277 self->clock_samples = NULL;
278 self->last_sample = -1;
279 self->num_samples = 0;
280 }
281 }
282 if (self->num_clock_samples > 0) {
283 self->clock_samples = g_malloc0_n (self->num_clock_samples,
284 sizeof (GstUvcH264ClockSample));
285 self->last_sample = -1;
286 self->num_samples = 0;
287 }
288 break;
289 default:
290 G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
291 break;
292 }
293 }
294
295 static void
gst_uvc_h264_mjpg_demux_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)296 gst_uvc_h264_mjpg_demux_get_property (GObject * object,
297 guint prop_id, GValue * value, GParamSpec * pspec)
298 {
299 GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
300
301 switch (prop_id) {
302 case PROP_DEVICE_FD:
303 g_value_set_int (value, self->device_fd);
304 break;
305 case PROP_NUM_CLOCK_SAMPLES:
306 g_value_set_int (value, self->num_clock_samples);
307 break;
308 default:
309 G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
310 break;
311 }
312 }
313
314 static gboolean
gst_uvc_h264_mjpg_demux_sink_event(GstPad * pad,GstObject * parent,GstEvent * event)315 gst_uvc_h264_mjpg_demux_sink_event (GstPad * pad, GstObject * parent,
316 GstEvent * event)
317 {
318 GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (parent);
319 gboolean res;
320
321 switch (GST_EVENT_TYPE (event)) {
322 case GST_EVENT_SEGMENT:
323 gst_event_copy_segment (event, &self->segment);
324 self->last_pts = GST_CLOCK_TIME_NONE;
325 res = gst_pad_push_event (self->jpeg_pad, event);
326 break;
327 case GST_EVENT_CAPS:
328 res = gst_pad_push_event (self->jpeg_pad, event);
329 break;
330 default:
331 res = gst_pad_event_default (pad, parent, event);
332 break;
333 }
334 return res;
335 }
336
337 static gboolean
gst_uvc_h264_mjpg_demux_query(GstPad * pad,GstObject * parent,GstQuery * query)338 gst_uvc_h264_mjpg_demux_query (GstPad * pad, GstObject * parent,
339 GstQuery * query)
340 {
341 GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (parent);
342 gboolean ret = FALSE;
343
344 switch (GST_QUERY_TYPE (query)) {
345 case GST_QUERY_CAPS:
346 if (pad == self->sink_pad)
347 ret = gst_pad_peer_query (self->jpeg_pad, query);
348 else
349 ret = gst_pad_peer_query (self->sink_pad, query);
350 break;
351 default:
352 ret = gst_pad_query_default (pad, parent, query);
353 }
354
355 return ret;
356 }
357
/* Sample the device/host clock pair via UVCIOC_GET_LAST_SCR and record it
 * in the circular clock_samples array.  Returns TRUE once the array is
 * full, i.e. enough samples exist to remap the device 32-bit @pts into a
 * host timestamp for @buf; the actual double-linear-regression remap is
 * still a TODO, so @buf is currently left untouched.
 * NOTE(review): num_samples is incremented without an upper bound on every
 * new sample — verify against the regression code once implemented. */
static gboolean
_pts_to_timestamp (GstUvcH264MjpgDemux * self, GstBuffer * buf, guint32 pts)
{
  GstUvcH264ClockSample *current_sample = NULL;
  GstUvcH264ClockSample *oldest_sample = NULL;
  guint32 next_sample;
  struct uvc_last_scr_sample sample;
  guint32 dev_sof;

  /* Clock tracking needs both the v4l2 fd and an allocated sample array */
  if (self->device_fd == -1 || self->clock_samples == NULL)
    return FALSE;

  if (-1 == ioctl (self->device_fd, UVCIOC_GET_LAST_SCR, &sample)) {
    //GST_WARNING_OBJECT (self, " GET_LAST_SCR error");
    return FALSE;
  }

  /* Shift the biased SOF counter into the upper half of a guint32 —
   * presumably so wraparound arithmetic works; TODO confirm the +2048
   * bias rationale against the UVC clock model */
  dev_sof = (guint32) (sample.dev_sof + 2048) << 16;
  if (self->num_samples > 0 &&
      self->clock_samples[self->last_sample].dev_sof == dev_sof) {
    /* Same SOF as the stored sample: no newer SCR, reuse that slot */
    current_sample = &self->clock_samples[self->last_sample];
  } else {
    /* New SCR: store it in the next circular-buffer slot */
    next_sample = (self->last_sample + 1) % self->num_clock_samples;
    current_sample = &self->clock_samples[next_sample];
    current_sample->dev_stc = sample.dev_stc;
    current_sample->dev_sof = dev_sof;
    current_sample->host_ts = sample.host_ts.tv_sec * GST_SECOND +
        sample.host_ts.tv_nsec * GST_NSECOND;
    current_sample->host_sof = (guint32) (sample.host_sof + 2048) << 16;

    self->num_samples++;
    self->last_sample = next_sample;

    /* Debug printing */
    GST_DEBUG_OBJECT (self, "device frequency: %u", sample.dev_frequency);
    GST_DEBUG_OBJECT (self, "dev_sof: %u", sample.dev_sof);
    GST_DEBUG_OBJECT (self, "dev_stc: %u", sample.dev_stc);
    GST_DEBUG_OBJECT (self,
        "host_ts: %" G_GUINT64_FORMAT " -- %" GST_TIME_FORMAT,
        current_sample->host_ts, GST_TIME_ARGS (current_sample->host_ts));
    GST_DEBUG_OBJECT (self, "host_sof: %u", sample.host_sof);
    GST_DEBUG_OBJECT (self, "PTS: %u", pts);
    GST_DEBUG_OBJECT (self, "Diff: %u - %f", sample.dev_stc - pts,
        (gdouble) (sample.dev_stc - pts) / sample.dev_frequency);
  }

  /* Not enough samples gathered yet for a meaningful regression */
  if (self->num_samples < self->num_clock_samples)
    return FALSE;

  /* The slot after the newest one is the oldest in the circular buffer */
  next_sample = (self->last_sample + 1) % self->num_clock_samples;
  oldest_sample = &self->clock_samples[next_sample];

  /* TODO: Use current_sample and oldest_sample to do the
   * double linear regression and calculate a new PTS */
  (void) oldest_sample;

  return TRUE;
}
416
/* Sink pad chain function: scans the incoming MJPG image for APP4 (0xffe4)
 * markers, reassembles the auxiliary stream payload they carry (H264, YUY2
 * or NV12, possibly split across several APP4 segments), pushes completed
 * auxiliary buffers on the matching source pad, and pushes the remaining
 * plain JPEG bytes on the jpeg pad.  Takes ownership of @buf: its memory
 * is copied into the output buffers and @buf itself is always unreffed. */
static GstFlowReturn
gst_uvc_h264_mjpg_demux_chain (GstPad * pad,
    GstObject * parent, GstBuffer * buf)
{
  GstUvcH264MjpgDemux *self;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *jpeg_buf = NULL;
  GstBuffer *aux_buf = NULL;
  AuxiliaryStreamHeader aux_header = { 0 };
  guint32 aux_size = 0;         /* aux payload bytes still expected */
  GstPad *aux_pad = NULL;       /* src pad matching aux_header.type */
  GstCaps **aux_caps = NULL;    /* cached caps slot for that pad */
  guint last_offset;            /* start of the JPEG bytes not yet copied */
  guint i;
  GstMapInfo info;
  guint16 segment_size;

  self = GST_UVC_H264_MJPG_DEMUX (GST_PAD_PARENT (pad));

  /* Nothing to scan in an empty buffer: forward it as-is */
  if (gst_buffer_get_size (buf) == 0) {
    return gst_pad_push (self->jpeg_pad, buf);
  }

  last_offset = 0;
  gst_buffer_map (buf, &info, GST_MAP_READ);

  /* Start from an empty buffer carrying only the input's metadata; the
   * JPEG memory regions found below are appended to it */
  jpeg_buf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_METADATA, 0, 0);

  for (i = 0; i < info.size - 1; i++) {
    /* Check for APP4 (0xe4) marker in the jpeg */
    if (info.data[i] == 0xff && info.data[i + 1] == 0xe4) {

      /* Sanity check sizes and get segment size */
      if (i + 4 >= info.size) {
        GST_ELEMENT_ERROR (self, STREAM, DEMUX,
            ("Not enough data to read marker size"), (NULL));
        ret = GST_FLOW_ERROR;
        goto done;
      }
      /* JPEG marker length is big-endian and includes its own 2 bytes */
      segment_size = GUINT16_FROM_BE (*((guint16 *) (info.data + i + 2)));

      if (i + segment_size + 2 >= info.size) {
        GST_ELEMENT_ERROR (self, STREAM, DEMUX,
            ("Not enough data to read marker content"), (NULL));
        ret = GST_FLOW_ERROR;
        goto done;
      }
      GST_DEBUG_OBJECT (self,
          "Found APP4 marker (%d). JPG: %d-%d - APP4: %d - %d", segment_size,
          last_offset, i, i, i + 2 + segment_size);

      /* Add JPEG data between the last offset and this marker */
      if (i - last_offset > 0) {
        GstMemory *m = gst_memory_copy (info.memory, last_offset,
            i - last_offset);
        gst_buffer_append_memory (jpeg_buf, m);
      }
      last_offset = i + 2 + segment_size;

      /* Reset i/segment size to the app4 data (ignore marker header/size) */
      i += 4;
      segment_size -= 2;

      /* If this is a new auxiliary stream, initialize everything properly */
      if (aux_buf == NULL) {
        if (segment_size < sizeof (aux_header) + sizeof (aux_size)) {
          GST_ELEMENT_ERROR (self, STREAM, DEMUX,
              ("Not enough data to read aux header"), (NULL));
          ret = GST_FLOW_ERROR;
          goto done;
        }

        /* Parse the per-frame auxiliary header that precedes the payload */
        aux_header = *((AuxiliaryStreamHeader *) (info.data + i));
        /* version should be little endian but it looks more like BE */
        aux_header.version = GUINT16_FROM_BE (aux_header.version);
        aux_header.header_len = GUINT16_FROM_LE (aux_header.header_len);
        aux_header.width = GUINT16_FROM_LE (aux_header.width);
        aux_header.height = GUINT16_FROM_LE (aux_header.height);
        aux_header.frame_interval = GUINT32_FROM_LE (aux_header.frame_interval);
        aux_header.delay = GUINT16_FROM_LE (aux_header.delay);
        aux_header.pts = GUINT32_FROM_LE (aux_header.pts);
        GST_DEBUG_OBJECT (self, "New auxiliary stream : v%d - %d bytes - %"
            GST_FOURCC_FORMAT " %dx%d -- %d *100ns -- %d ms -- %d",
            aux_header.version, aux_header.header_len,
            GST_FOURCC_ARGS (aux_header.type),
            aux_header.width, aux_header.height,
            aux_header.frame_interval, aux_header.delay, aux_header.pts);
        /* Total payload size follows the (variable-length) header */
        aux_size = *((guint32 *) (info.data + i + aux_header.header_len));
        GST_DEBUG_OBJECT (self, "Auxiliary stream size : %d bytes", aux_size);

        if (aux_size > 0) {
          guint16 *width = NULL;
          guint16 *height = NULL;

          /* Find the auxiliary stream's pad and caps */
          switch (aux_header.type) {
            case GST_MAKE_FOURCC ('H', '2', '6', '4'):
              aux_pad = self->h264_pad;
              aux_caps = &self->h264_caps;
              width = &self->h264_width;
              height = &self->h264_height;
              break;
            case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
              aux_pad = self->yuy2_pad;
              aux_caps = &self->yuy2_caps;
              width = &self->yuy2_width;
              height = &self->yuy2_height;
              break;
            case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
              aux_pad = self->nv12_pad;
              aux_caps = &self->nv12_caps;
              width = &self->nv12_width;
              height = &self->nv12_height;
              break;
            default:
              GST_ELEMENT_ERROR (self, STREAM, DEMUX,
                  ("Unknown auxiliary stream format : %" GST_FOURCC_FORMAT,
                      GST_FOURCC_ARGS (aux_header.type)), (NULL));
              ret = GST_FLOW_ERROR;
              break;
          }

          if (ret != GST_FLOW_OK)
            goto done;

          /* Renegotiate caps whenever the aux resolution changes */
          if (*width != aux_header.width || *height != aux_header.height) {
            GstCaps *peercaps = gst_pad_peer_query_caps (aux_pad, NULL);
            GstStructure *s = NULL;
            gint fps_num = 1000000000 / aux_header.frame_interval;
            gint fps_den = 100;

            /* TODO: intersect with pad template */
            GST_DEBUG ("peercaps : %" GST_PTR_FORMAT, peercaps);
            if (peercaps && !gst_caps_is_any (peercaps)) {
              peercaps = gst_caps_make_writable (peercaps);
              s = gst_caps_get_structure (peercaps, 0);
            }
            if (s && gst_structure_has_field (s, "framerate")) {
              /* TODO: make sure it contains the right format/width/height */
              gst_structure_fixate_field_nearest_fraction (s, "framerate",
                  fps_num, fps_den);
              GST_DEBUG ("Fixated struct : %" GST_PTR_FORMAT, s);
              gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);
            }
            if (peercaps)
              gst_caps_unref (peercaps);

            *width = aux_header.width;
            *height = aux_header.height;
            *aux_caps = gst_caps_make_writable (*aux_caps);
            /* FIXME: fps must match the caps and be allowed and represent
               our first buffer */
            gst_caps_set_simple (*aux_caps,
                "width", G_TYPE_INT, aux_header.width,
                "height", G_TYPE_INT, aux_header.height,
                "framerate", GST_TYPE_FRACTION, fps_num, fps_den, NULL);
            gst_pad_push_event (aux_pad, gst_event_new_caps (*aux_caps));
            gst_pad_push_event (aux_pad,
                gst_event_new_segment (&self->segment));
          }

          /* Create new auxiliary buffer list and adjust i/segment size */
          aux_buf = gst_buffer_new ();
        }

        /* Skip past the parsed header + size field within this segment */
        i += sizeof (aux_header) + sizeof (aux_size);
        segment_size -= sizeof (aux_header) + sizeof (aux_size);
      }

      if (segment_size > aux_size) {
        GST_ELEMENT_ERROR (self, STREAM, DEMUX,
            ("Expected %d auxiliary data, got %d bytes", aux_size,
                segment_size), (NULL));
        ret = GST_FLOW_ERROR;
        goto done;
      }

      if (segment_size > 0) {
        GstMemory *m;
        m = gst_memory_copy (info.memory, i, segment_size);

        /* frame_interval is in 100ns units */
        GST_BUFFER_DURATION (aux_buf) =
            aux_header.frame_interval * 100 * GST_NSECOND;

        _pts_to_timestamp (self, aux_buf, aux_header.pts);

        gst_buffer_append_memory (aux_buf, m);

        aux_size -= segment_size;

        /* Push completed aux data */
        if (aux_size == 0) {
          /* Last attempt to apply timestamp. FIXME: This
           * is broken for H.264 with B-frames */
          if (GST_BUFFER_PTS (aux_buf) == GST_CLOCK_TIME_NONE) {
            /* Fall back to the container buffer's PTS, warning once if it
             * goes backwards */
            if (!self->pts_reordered_warning &&
                self->last_pts != GST_CLOCK_TIME_NONE &&
                self->last_pts > GST_BUFFER_PTS (buf)) {
              GST_WARNING_OBJECT (self, "PTS went backward, timestamping "
                  "might be broken");
              self->pts_reordered_warning = TRUE;
            }
            self->last_pts = GST_BUFFER_PTS (buf);

            GST_BUFFER_PTS (aux_buf) = GST_BUFFER_PTS (buf);
          }
          if (GST_BUFFER_DTS (aux_buf) == GST_CLOCK_TIME_NONE) {
            /* Derive DTS by subtracting the encoder delay, clamped at 0 */
            GstClockTime dts = GST_BUFFER_PTS (aux_buf);
            GstClockTime delay = aux_header.delay * GST_MSECOND;
            if (dts > delay)
              dts -= delay;
            else
              dts = 0;
            GST_BUFFER_DTS (aux_buf) = dts;
            GST_LOG_OBJECT (self, "Applied DTS %" GST_TIME_FORMAT
                " to aux_buf", GST_TIME_ARGS (dts));
          }

          GST_DEBUG_OBJECT (self, "Pushing %" GST_FOURCC_FORMAT
              " auxiliary buffer %" GST_PTR_FORMAT,
              GST_FOURCC_ARGS (aux_header.type), *aux_caps);
          ret = gst_pad_push (aux_pad, aux_buf);
          aux_buf = NULL;
          if (ret != GST_FLOW_OK) {
            GST_WARNING_OBJECT (self, "Error pushing %" GST_FOURCC_FORMAT
                " auxiliary data", GST_FOURCC_ARGS (aux_header.type));
            goto done;
          }
        }
      }

      /* Continue scanning right after this segment (loop's i++ adds 1) */
      i += segment_size - 1;
    } else if (info.data[i] == 0xff && info.data[i + 1] == 0xda) {
      GstMemory *m;

      /* The APP4 markers must be before the SOS marker, so this is the end */
      GST_DEBUG_OBJECT (self, "Found SOS marker.");

      m = gst_memory_copy (info.memory, last_offset, info.size - last_offset);
      gst_buffer_append_memory (jpeg_buf, m);
      last_offset = info.size;
      break;
    }
  }

  /* An aux buffer left over means the stream ended mid-payload */
  if (aux_buf != NULL) {
    GST_DEBUG_OBJECT (self, "Incomplete auxiliary stream: %d bytes missing, "
        "%d segment size remaining -- missing segment, C920 bug?",
        aux_size, segment_size);
    ret = GST_FLOW_OK;
    goto done;
  }

  if (last_offset != info.size) {
    /* this means there was no SOS marker in the jpg, so we assume the JPG was
       just a container */
    GST_DEBUG_OBJECT (self, "SOS marker wasn't found. MJPG is container only");
    gst_buffer_unref (jpeg_buf);
    jpeg_buf = NULL;
  } else {
    ret = gst_pad_push (self->jpeg_pad, jpeg_buf);
    jpeg_buf = NULL;
  }

  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (self, "Error pushing jpeg data");
    goto done;
  }

done:
  /* In case of error, unref whatever was left */
  if (aux_buf)
    gst_buffer_unref (aux_buf);
  if (jpeg_buf)
    gst_buffer_unref (jpeg_buf);

  gst_buffer_unmap (buf, &info);

  /* We must always unref the input buffer since we never push it out */
  gst_buffer_unref (buf);

  return ret;
}
700