1 /* GStreamer
2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
16 *
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
21 */
22
23 /**
24 * SECTION:element-jpegdec
25 * @title: jpegdec
26 *
27 * Decodes jpeg images.
28 *
29 * ## Example launch line
30 * |[
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
33 *
34 */
35
36 #ifdef HAVE_CONFIG_H
37 #include "config.h"
38 #endif
39 #include <string.h>
40
41 #include "gstjpeg.h"
42 #include "gstjpegdec.h"
43 #include "gstjpegelements.h"
44 #include <gst/video/video.h>
45 #include <gst/video/gstvideometa.h>
46 #include <gst/video/gstvideopool.h>
47 #include "gst/gst-i18n-plugin.h"
48 #include <jerror.h>
49
/* Sanity limits for image dimensions; JPEG headers store them in 16 bits */
#define MIN_WIDTH  1
#define MAX_WIDTH  65535
#define MIN_HEIGHT 1
#define MAX_HEIGHT 65535

/* Recover the GstJpegDec instance from a libjpeg decompress context via the
 * custom source manager installed in gst_jpeg_dec_init() */
#define CINFO_GET_JPEGDEC(cinfo_ptr) \
        (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)

/* Property defaults */
#define JPEG_DEFAULT_IDCT_METHOD	JDCT_FASTEST
#define JPEG_DEFAULT_MAX_ERRORS 	0
60
/* Element property ids */
enum
{
  PROP_0,                       /* GObject reserves property id 0 */
  PROP_IDCT_METHOD,             /* "idct-method" */
  PROP_MAX_ERRORS               /* "max-errors" (deprecated) */
};
67
/* *INDENT-OFF* */
/* Source pad: the raw video formats this decoder can produce */
static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
    );
/* *INDENT-ON* */
77
/* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
/* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
 * once we have a parser and/or demuxer set caps properly */
/* Sink pad: accepts any image/jpeg stream */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/jpeg")
    );
87
GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
#define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);

/* GObject property accessors */
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* GstVideoDecoder virtual method implementations */
static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
    GstVideoCodecState * state);
static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
    GstQuery * query);
static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
    GstEvent * event);

#define gst_jpeg_dec_parent_class parent_class
G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
GST_ELEMENT_REGISTER_DEFINE (jpegdec, "jpegdec", GST_RANK_PRIMARY,
    GST_TYPE_JPEG_DEC);
115
/* GObject finalize: tear down the libjpeg decompressor created in
 * gst_jpeg_dec_init() and drop the remembered input state, then chain up. */
static void
gst_jpeg_dec_finalize (GObject * object)
{
  GstJpegDec *dec = GST_JPEG_DEC (object);

  /* releases all memory libjpeg allocated for this decompress context */
  jpeg_destroy_decompress (&dec->cinfo);
  if (dec->input_state)
    gst_video_codec_state_unref (dec->input_state);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
127
/* Class initialisation: install properties and pad templates, set element
 * metadata, and hook up the GstVideoDecoder virtual methods. */
static void
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoDecoderClass *vdec_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  vdec_class = (GstVideoDecoderClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_jpeg_dec_finalize;
  gobject_class->set_property = gst_jpeg_dec_set_property;
  gobject_class->get_property = gst_jpeg_dec_get_property;

  g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
      g_param_spec_enum ("idct-method", "IDCT Method",
          "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
          JPEG_DEFAULT_IDCT_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstJpegDec:max-errors:
   *
   * Error out after receiving N consecutive decoding errors
   * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
   *
   * Deprecated: 1.3.1: Property wasn't used internally
   */
#ifndef GST_REMOVE_DEPRECATED
  g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
      g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
          "(Deprecated) Error out after receiving N consecutive decoding errors"
          " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
          -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
#endif

  gst_element_class_add_static_pad_template (element_class,
      &gst_jpeg_dec_src_pad_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_jpeg_dec_sink_pad_template);
  gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
      "Codec/Decoder/Image", "Decode images from JPEG format",
      "Wim Taymans <wim@fluendo.com>");

  /* GstVideoDecoder virtual methods */
  vdec_class->start = gst_jpeg_dec_start;
  vdec_class->stop = gst_jpeg_dec_stop;
  vdec_class->flush = gst_jpeg_dec_flush;
  vdec_class->parse = gst_jpeg_dec_parse;
  vdec_class->set_format = gst_jpeg_dec_set_format;
  vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
  vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
  vdec_class->sink_event = gst_jpeg_dec_sink_event;

  GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");

  gst_type_mark_as_plugin_api (GST_TYPE_IDCT_METHOD, 0);
}
190
/* libjpeg source manager callback asking for more input data.
 * We hand libjpeg one complete frame up front, so being called here means
 * the frame ran short; FALSE tells libjpeg the operation must suspend. */
static boolean
gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
{
  /* We pass in the full frame initially; if this gets called, the frame is
   * most likely corrupted */
  return FALSE;
}
198
/* libjpeg source manager callback invoked when decompression starts;
 * no per-stream setup is needed, only trace logging. */
static void
gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
{
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
}
204
205
/* libjpeg source manager callback: skip num_bytes of input.
 * Requests that would run past the end of the buffered frame are silently
 * ignored (the pointer is only advanced when the data is really there). */
static void
gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
{
  GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);

  GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);

  /* num_bytes > 0 guarantees the signed->unsigned comparison below is safe */
  if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
    cinfo->src->next_input_byte += (size_t) num_bytes;
    cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
  }
}
218
/* libjpeg source manager callback for resynchronising after a corrupt
 * restart marker; we just log and report success so decoding continues. */
static boolean
gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
{
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
  return TRUE;
}
225
226 static void
gst_jpeg_dec_term_source(j_decompress_ptr cinfo)227 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
228 {
229 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
230 return;
231 }
232
233 METHODDEF (void)
gst_jpeg_dec_my_output_message(j_common_ptr cinfo)234 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
235 {
236 return; /* do nothing */
237 }
238
239 METHODDEF (void)
gst_jpeg_dec_my_emit_message(j_common_ptr cinfo,int msg_level)240 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
241 {
242 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
243 return;
244 }
245
/* Override of libjpeg's error_exit handler. The default would call exit();
 * instead we report the message through output_message (silenced above) and
 * longjmp back to the setjmp point the decode path established, turning the
 * fatal libjpeg error into a recoverable GStreamer decode error. */
METHODDEF (void)
gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
{
  struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;

  (*cinfo->err->output_message) (cinfo);
  longjmp (err_mgr->setjmp_buffer, 1);
}
254
/* Instance initialisation: set up the libjpeg decompressor with our custom
 * error and source managers, initialise property defaults, and configure
 * the base-class accept-caps behaviour. */
static void
gst_jpeg_dec_init (GstJpegDec * dec)
{
  GST_DEBUG ("initializing");

  /* setup jpeglib: silent error handlers that longjmp instead of exit() */
  memset (&dec->cinfo, 0, sizeof (dec->cinfo));
  memset (&dec->jerr, 0, sizeof (dec->jerr));
  dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
  dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
  dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
  dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;

  jpeg_create_decompress (&dec->cinfo);

  /* custom source manager that reads from the buffered frame; jsrc embeds a
   * back-pointer to the decoder (see CINFO_GET_JPEGDEC) */
  dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
  dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
  dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
  dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
  dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
  dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
  dec->jsrc.dec = dec;

  /* init properties */
  dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
  dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;

  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
}
286
287 static inline gboolean
gst_jpeg_dec_parse_tag_has_entropy_segment(guint8 tag)288 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
289 {
290 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
291 return TRUE;
292 return FALSE;
293 }
294
/* GstVideoDecoder::parse implementation: scan the adapter for JPEG markers
 * and carve the byte stream into complete images (SOI..EOI). Parser state
 * (saw_header, parse_resync, parse_entropy_len) lives on the decoder so the
 * scan can resume incrementally as more data arrives.
 *
 * Returns GST_VIDEO_DECODER_FLOW_NEED_DATA until a full frame is available,
 * then hands the frame to the base class via gst_video_decoder_have_frame().
 */
static GstFlowReturn
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  guint size;
  gint toadd = 0;
  gboolean resync;
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

  if (at_eos) {
    GST_DEBUG ("Flushing all data out");
    toadd = size;

    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
      goto drop_frame;
    goto have_full_frame;
  }

  if (size < 8)
    goto need_more_data;

  /* skip to the SOI marker (0xffd8) if we haven't seen one yet */
  if (!dec->saw_header) {
    gint ret;
    /* we expect at least 4 bytes, first of which start marker */
    ret =
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
        size - 4);

    GST_DEBUG ("ret:%d", ret);
    if (ret < 0)
      goto need_more_data;

    if (ret) {
      /* discard garbage before the start marker */
      gst_adapter_flush (adapter, ret);
      size -= ret;
    }
    dec->saw_header = TRUE;
  }

  /* walk marker segments until we find EOI (or a premature new SOI) */
  while (1) {
    guint frame_len;
    guint32 value;

    GST_DEBUG ("offset:%d, size:%d", offset, size);

    noffset =
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);

    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
    }
    /* may have marker, but could have been resyncing */
    resync = resync || dec->parse_resync;
    /* Skip over extra 0xff (fill bytes are legal before a marker) */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
      noffset++;
      noffset =
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);
    }
    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
    if (noffset < 0) {
      GST_DEBUG ("at end of input and no EOI marker found, need more data");
      goto need_more_data;
    }

    /* now lock on the marker we found */
    offset = noffset;
    value = value & 0xff;
    if (value == 0xd9) {
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      /* include the 0xffd9 EOI marker itself in the frame */
      toadd = offset + 4;
      goto have_full_frame;
    }
    if (value == 0xd8) {
      GST_DEBUG ("0x%08x: SOI marker before EOI marker", offset + 2);

      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      /* truncated image: everything up to (not including) the new SOI */
      toadd = offset;
      goto have_full_frame;
    }


    /* restart markers are parameterless; everything else carries a
     * 16-bit big-endian segment length */
    if (value >= 0xd0 && value <= 0xd7)
      frame_len = 0;
    else {
      /* peek tag and subsequent length */
      if (offset + 2 + 4 > size)
        goto need_more_data;
      else
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
            &frame_len);
      frame_len = frame_len & 0xffff;
    }
    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {
      goto need_more_data;
    }

    if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
      guint eseglen = dec->parse_entropy_len;

      /* entropy-coded data has no length field: scan forward for the next
       * marker (0xff followed by non-zero; 0xff00 is a stuffed byte) */
      GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
          offset + 2, eseglen);
      if (size < offset + 2 + frame_len + eseglen)
        goto need_more_data;
      noffset = offset + 2 + frame_len + dec->parse_entropy_len;
      while (1) {
        GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
            noffset, size, size - noffset);
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
        if (noffset < 0) {
          /* need more data; remember how far we already scanned */
          dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
          goto need_more_data;
        }
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
          break;
        }
        noffset++;
      }
      dec->parse_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
          frame_len);
    }
    if (resync) {
      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
          noffset, 4);
      if (noffset < 0) {
        /* ignore and continue resyncing until we hit the end
         * of our data or find a sync point that looks okay */
        offset++;
        continue;
      }
      GST_DEBUG ("found sync at 0x%x", offset + 2);
    }

    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;
  }

need_more_data:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

have_full_frame:
  if (toadd)
    gst_video_decoder_add_to_frame (bdec, toadd);
  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  return gst_video_decoder_have_frame (bdec);

drop_frame:
  gst_adapter_flush (adapter, size);
  return GST_FLOW_OK;
}
476
477
/* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install a Huffman table into the decompressor, allocating the table
 * structure on first use. 'bits' is the 17-entry count-per-code-length
 * array (index 0 unused), 'val' the symbol values in code order. */
static void
add_huff_table (j_decompress_ptr dinfo,
    JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
/* Define a Huffman table */
{
  int nsymbols, len;

  if (*htblptr == NULL)
    *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);

  g_assert (*htblptr);

  /* Copy the number-of-symbols-of-each-code-length counts */
  memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));

  /* Validate the counts. We do this here mainly so we can copy the right
   * number of symbols from the val[] array, without risking marching off
   * the end of memory. jchuff.c will do a more thorough test later.
   */
  nsymbols = 0;
  for (len = 1; len <= 16; len++)
    nsymbols += bits[len];
  if (nsymbols < 1 || nsymbols > 256)
    g_error ("jpegutils.c:  add_huff_table failed badly. ");

  memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
}
506
507
508
/* Install the four standard Huffman tables from JPEG spec Annex K.3
 * (DC/AC x luminance/chrominance) into the decompressor. */
static void
std_huff_tables (j_decompress_ptr dinfo)
/* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
/* IMPORTANT: these are only valid for 8-bit data precision! */
{
  static const UINT8 bits_dc_luminance[17] =
      { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_luminance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_dc_chrominance[17] =
      { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_chrominance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_ac_luminance[17] =
      { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
  static const UINT8 val_ac_luminance[] =
      { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
    0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
    0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
    0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
    0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
    0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
    0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
    0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
    0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
    0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
    0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
    0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
    0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
    0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
    0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
    0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
    0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
    0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
    0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
    0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
    0xf9, 0xfa
  };

  static const UINT8 bits_ac_chrominance[17] =
      { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
  static const UINT8 val_ac_chrominance[] =
      { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
    0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
    0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
    0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
    0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
    0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
    0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
    0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
    0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
    0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
    0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
    0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
    0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
    0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
    0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
    0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
    0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
    0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
    0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
    0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
    0xf9, 0xfa
  };

  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
      bits_dc_luminance, val_dc_luminance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
      bits_ac_luminance, val_ac_luminance);
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
      bits_dc_chrominance, val_dc_chrominance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
      bits_ac_chrominance, val_ac_chrominance);
}
585
586
587
588 static void
guarantee_huff_tables(j_decompress_ptr dinfo)589 guarantee_huff_tables (j_decompress_ptr dinfo)
590 {
591 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
592 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
593 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
594 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
595 GST_DEBUG ("Generating standard Huffman tables for this frame.");
596 std_huff_tables (dinfo);
597 }
598 }
599
/* GstVideoDecoder::set_format implementation: remember the negotiated input
 * state, replacing any previously stored one. Always accepts the caps. */
static gboolean
gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
{
  GstJpegDec *jpeg = GST_JPEG_DEC (dec);

  if (jpeg->input_state)
    gst_video_codec_state_unref (jpeg->input_state);
  jpeg->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}
611
612
613 /* yuk */
614 static void
hresamplecpy1(guint8 * dest,const guint8 * src,guint len)615 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
616 {
617 gint i;
618
619 for (i = 0; i < len; ++i) {
620 /* equivalent to: dest[i] = src[i << 1] */
621 *dest = *src;
622 ++dest;
623 ++src;
624 ++src;
625 }
626 }
627
628 static void
gst_jpeg_dec_free_buffers(GstJpegDec * dec)629 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
630 {
631 gint i;
632
633 for (i = 0; i < 16; i++) {
634 g_free (dec->idr_y[i]);
635 g_free (dec->idr_u[i]);
636 g_free (dec->idr_v[i]);
637 dec->idr_y[i] = NULL;
638 dec->idr_u[i] = NULL;
639 dec->idr_v[i] = NULL;
640 }
641
642 dec->idr_width_allocated = 0;
643 }
644
645 static inline gboolean
gst_jpeg_dec_ensure_buffers(GstJpegDec * dec,guint maxrowbytes)646 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
647 {
648 gint i;
649
650 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
651 return TRUE;
652
653 /* FIXME: maybe just alloc one or three blocks altogether? */
654 for (i = 0; i < 16; i++) {
655 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
656 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
657 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
658
659 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
660 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
661 return FALSE;
662 }
663 }
664
665 dec->idr_width_allocated = maxrowbytes;
666 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
667 return TRUE;
668 }
669
/* Indirect decode of a grayscale image: libjpeg writes DCTSIZE raw rows at
 * a time into the temporary idr_y buffers, and we copy each pixel into the
 * frame honouring its pixel stride (so packed output formats work too).
 * @field:      which field is being written (2 = second field, offset by
 *              one row) — only meaningful when num_fields is 2
 * @num_fields: 1 for progressive content, 2 for interlaced
 */
static void
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
    guint field, guint num_fields)
{
  guchar *rows[16];
  guchar **scanarray[1] = { rows };
  gint i, j, k;
  gint lines;
  guint8 *base[1];
  gint width, height;
  gint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  if (field == 2) {
    base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  }

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  /* interlaced output interleaves the two fields row by row */
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;

  memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));

  i = 0;
  while (i < height) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        gint p;

        p = 0;
        for (k = 0; k < width; k++) {
          base[0][p] = rows[j][k];
          p += pstride;
        }
        base[0] += rstride;
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
719
/* Indirect decode of an RGB image: libjpeg writes the three raw component
 * planes into our temporary row buffers, and we interleave them into the
 * (packed) output frame using the frame's pixel stride.
 * @field:      which field is being written (2 = second field, offset by
 *              one row) — only meaningful when num_fields is 2
 * @num_fields: 1 for progressive content, 2 for interlaced
 */
static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
    guint field, guint num_fields)
{
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  gint i, j, k;
  gint lines;
  guint8 *base[3];
  guint pstride, rstride;
  gint width, height;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    if (field == 2)
      base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
  }

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  /* interlaced output interleaves the two fields row by row */
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;

  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

  i = 0;
  while (i < height) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        gint p;

        p = 0;
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
          p += pstride;
        }
        base[0] += rstride;
        base[1] += rstride;
        base[2] += rstride;
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
776
777 static void
gst_jpeg_dec_decode_indirect(GstJpegDec * dec,GstVideoFrame * frame,gint r_v,gint r_h,gint comp,guint field,guint num_fields)778 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
779 gint r_h, gint comp, guint field, guint num_fields)
780 {
781 guchar *y_rows[16], *u_rows[16], *v_rows[16];
782 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
783 gint i, j, k;
784 gint lines;
785 guchar *base[3], *last[3];
786 gint rowsize[3], stride[3];
787 gint width, height;
788
789 GST_DEBUG_OBJECT (dec,
790 "unadvantageous width or r_h, taking slow route involving memcpy");
791
792 width = GST_VIDEO_FRAME_WIDTH (frame);
793 height = GST_VIDEO_FRAME_HEIGHT (frame);
794
795 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
796 return;
797
798 for (i = 0; i < 3; i++) {
799 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
800 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
801 rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
802 /* make sure we don't make jpeglib write beyond our buffer,
803 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
804 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
805 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
806
807 if (field == 2) {
808 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
809 }
810 }
811
812 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
813 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
814 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
815
816 /* fill chroma components for grayscale */
817 if (comp == 1) {
818 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
819 for (i = 0; i < 16; i++) {
820 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
821 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
822 }
823 }
824
825 for (i = 0; i < height; i += r_v * DCTSIZE) {
826 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
827 if (G_LIKELY (lines > 0)) {
828 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
829 if (G_LIKELY (base[0] <= last[0])) {
830 memcpy (base[0], y_rows[j], rowsize[0]);
831 base[0] += stride[0];
832 }
833 if (r_v == 2) {
834 if (G_LIKELY (base[0] <= last[0])) {
835 memcpy (base[0], y_rows[j + 1], rowsize[0]);
836 base[0] += stride[0];
837 }
838 }
839 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
840 if (r_h == 2) {
841 memcpy (base[1], u_rows[k], rowsize[1]);
842 memcpy (base[2], v_rows[k], rowsize[2]);
843 } else if (r_h == 1) {
844 hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
845 hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
846 } else {
847 /* FIXME: implement (at least we avoid crashing by doing nothing) */
848 }
849 }
850
851 if (r_v == 2 || (k & 1) != 0) {
852 base[1] += stride[1];
853 base[2] += stride[2];
854 }
855 }
856 } else {
857 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
858 }
859 }
860 }
861
/* Direct decode: build per-row pointer tables into the output frame and let
 * jpeg_read_raw_data() (or jpeg_read_scanlines() with libjpeg-turbo colour
 * space extensions) write straight into the frame, avoiding the memcpy of
 * the indirect paths. Rows jpeglib would write past the end of a component
 * are redirected into a scratch row.
 * @field:      which field is being written (2 = second field, offset by
 *              one row) — only meaningful when num_fields is 2
 * @num_fields: 1 for progressive content, 2 for interlaced
 * Returns GST_FLOW_OK, or a decoder-error flow return for unsupported
 * vertical sampling factors (> 2). */
static GstFlowReturn
gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
    guint field, guint num_fields)
{
  guchar **line[3];             /* the jpeg line buffer */
  guchar *y[4 * DCTSIZE] = { NULL, };   /* alloc enough for the lines */
  guchar *u[4 * DCTSIZE] = { NULL, };   /* r_v will be <4 */
  guchar *v[4 * DCTSIZE] = { NULL, };
  gint i, j;
  gint lines, v_samp[3];
  guchar *base[3], *last[3];
  gint stride[3];
  guint height, field_height;

  line[0] = y;
  line[1] = u;
  line[2] = v;

  v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
  v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
  v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;

  if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
    goto format_not_supported;

  height = field_height = GST_VIDEO_FRAME_HEIGHT (frame);

  /* XXX: division by 2 here might not be a good idea yes. But we are doing this
   * already in gst_jpeg_dec_handle_frame() for interlaced jpeg */
  if (num_fields == 2)
    field_height /= 2;

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));

    if (field == 2) {
      base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    }
  }

  /* a scratch row is only needed when the height is not a multiple of the
   * MCU row height, i.e. when some jpeglib rows must be discarded */
  if (field_height % (v_samp[0] * DCTSIZE) && (dec->scratch_size < stride[0])) {
    g_free (dec->scratch);
    dec->scratch = g_malloc (stride[0]);
    dec->scratch_size = stride[0];
  }

  /* let jpeglib decode directly into our final buffer */
  GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");

#ifdef JCS_EXTENSIONS
  if (dec->format_convert) {
    /* libjpeg-turbo does the colour conversion for us: read packed
     * scanlines straight into plane 0 */
    gint row_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
    guchar *bufbase = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);

    if (num_fields == 2) {
      row_stride *= 2;
    }

    if (field == 2) {
      bufbase += GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
    }

    while (dec->cinfo.output_scanline < dec->cinfo.output_height) {
      JSAMPARRAY buffer = { &bufbase, };
      jpeg_read_scanlines (&dec->cinfo, buffer, 1);
      bufbase += row_stride;
    }
  } else
#endif
  {
    for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
      for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
        /* Y */
        line[0][j] = base[0] + (i + j) * stride[0];
        if (G_UNLIKELY (line[0][j] > last[0]))
          line[0][j] = dec->scratch;
        /* U */
        if (v_samp[1] == v_samp[0]) {
          line[1][j] = base[1] + ((i + j) / 2) * stride[1];
        } else if (j < (v_samp[1] * DCTSIZE)) {
          line[1][j] = base[1] + ((i / 2) + j) * stride[1];
        }
        if (G_UNLIKELY (line[1][j] > last[1]))
          line[1][j] = dec->scratch;
        /* V */
        if (v_samp[2] == v_samp[0]) {
          line[2][j] = base[2] + ((i + j) / 2) * stride[2];
        } else if (j < (v_samp[2] * DCTSIZE)) {
          line[2][j] = base[2] + ((i / 2) + j) * stride[2];
        }
        if (G_UNLIKELY (line[2][j] > last[2]))
          line[2][j] = dec->scratch;
      }

      lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
      if (G_UNLIKELY (!lines)) {
        GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
      }
    }
  }
  return GST_FLOW_OK;

format_not_supported:
  {
    /* NOTE(review): 'ret' is declared gboolean but receives a GstFlowReturn
     * from GST_VIDEO_DECODER_ERROR; both are int-sized so this works, but
     * GstFlowReturn would be the more accurate type — consider changing. */
    gboolean ret = GST_FLOW_OK;

    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
            v_samp[1], v_samp[2]), ret);

    return ret;
  }
}
981
982 #ifdef JCS_EXTENSIONS
983 static J_COLOR_SPACE
gst_fmt_to_jpeg_turbo_ext_fmt(GstVideoFormat gstfmt)984 gst_fmt_to_jpeg_turbo_ext_fmt (GstVideoFormat gstfmt)
985 {
986 switch (gstfmt) {
987 case GST_VIDEO_FORMAT_RGB:
988 return JCS_EXT_RGB;
989 case GST_VIDEO_FORMAT_RGBx:
990 return JCS_EXT_RGBX;
991 case GST_VIDEO_FORMAT_xRGB:
992 return JCS_EXT_XRGB;
993 case GST_VIDEO_FORMAT_RGBA:
994 return JCS_EXT_RGBA;
995 case GST_VIDEO_FORMAT_ARGB:
996 return JCS_EXT_ARGB;
997 case GST_VIDEO_FORMAT_BGR:
998 return JCS_EXT_BGR;
999 case GST_VIDEO_FORMAT_BGRx:
1000 return JCS_EXT_BGRX;
1001 case GST_VIDEO_FORMAT_xBGR:
1002 return JCS_EXT_XBGR;
1003 case GST_VIDEO_FORMAT_BGRA:
1004 return JCS_EXT_BGRA;
1005 case GST_VIDEO_FORMAT_ABGR:
1006 return JCS_EXT_ABGR;
1007 default:
1008 return 0;
1009 }
1010 }
1011
1012 static void
gst_jpeg_turbo_parse_ext_fmt_convert(GstJpegDec * dec,gint * clrspc)1013 gst_jpeg_turbo_parse_ext_fmt_convert (GstJpegDec * dec, gint * clrspc)
1014 {
1015 GstCaps *peer_caps, *dec_caps;
1016
1017 dec_caps = gst_static_caps_get (&gst_jpeg_dec_src_pad_template.static_caps);
1018 peer_caps =
1019 gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (dec), dec_caps);
1020 gst_caps_unref (dec_caps);
1021
1022 GST_DEBUG ("Received caps from peer: %" GST_PTR_FORMAT, peer_caps);
1023 dec->format_convert = FALSE;
1024 if (!gst_caps_is_empty (peer_caps)) {
1025 GstStructure *peerstruct;
1026 const gchar *peerformat;
1027 GstVideoFormat peerfmt;
1028
1029 if (!gst_caps_is_fixed (peer_caps))
1030 peer_caps = gst_caps_fixate (peer_caps);
1031
1032 peerstruct = gst_caps_get_structure (peer_caps, 0);
1033 peerformat = gst_structure_get_string (peerstruct, "format");
1034 peerfmt = gst_video_format_from_string (peerformat);
1035
1036 switch (peerfmt) {
1037 case GST_VIDEO_FORMAT_RGB:
1038 case GST_VIDEO_FORMAT_RGBx:
1039 case GST_VIDEO_FORMAT_xRGB:
1040 case GST_VIDEO_FORMAT_RGBA:
1041 case GST_VIDEO_FORMAT_ARGB:
1042 case GST_VIDEO_FORMAT_BGR:
1043 case GST_VIDEO_FORMAT_BGRx:
1044 case GST_VIDEO_FORMAT_xBGR:
1045 case GST_VIDEO_FORMAT_BGRA:
1046 case GST_VIDEO_FORMAT_ABGR:
1047 if (clrspc)
1048 *clrspc = JCS_RGB;
1049 dec->format = peerfmt;
1050 dec->format_convert = TRUE;
1051 dec->libjpeg_ext_format = gst_fmt_to_jpeg_turbo_ext_fmt (peerfmt);
1052 break;
1053 default:
1054 break;
1055 }
1056 }
1057 gst_caps_unref (peer_caps);
1058 GST_DEBUG_OBJECT (dec, "format_convert=%d", dec->format_convert);
1059 }
1060 #endif
1061
/* Configure (or re-use) the decoder's output state for the decoded image:
 * pick the output pixel format from the JPEG colorspace (or from a
 * previously enabled libjpeg-turbo direct-conversion format), apply the
 * JPEG colorimetry defaults for YUV output, and mark interlaced streams.
 *
 * width/height: output dimensions (full frame, i.e. both fields if
 * interlaced); clrspc: libjpeg JCS_* colorspace of the stream. */
static void
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
    gboolean interlaced)
{
  GstVideoCodecState *outstate;
  GstVideoInfo *info;
  GstVideoFormat format;

#ifdef JCS_EXTENSIONS
  /* direct conversion already negotiated: keep the peer's RGB format */
  if (dec->format_convert) {
    format = dec->format;
  } else
#endif
  {
    switch (clrspc) {
      case JCS_RGB:
        format = GST_VIDEO_FORMAT_RGB;
        break;
      case JCS_GRAYSCALE:
        format = GST_VIDEO_FORMAT_GRAY8;
        break;
      default:
        /* YCbCr (and anything else that got this far) is output as I420 */
        format = GST_VIDEO_FORMAT_I420;
        break;
    }
  }

  /* Compare to currently configured output state; if nothing changed
   * we can skip renegotiation entirely */
  outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
  if (outstate) {
    info = &outstate->info;

    if (width == GST_VIDEO_INFO_WIDTH (info) &&
        height == GST_VIDEO_INFO_HEIGHT (info) &&
        format == GST_VIDEO_INFO_FORMAT (info)) {
      gst_video_codec_state_unref (outstate);
      return;
    }
    gst_video_codec_state_unref (outstate);
  }
#ifdef JCS_EXTENSIONS
  /* Determine if libjpeg-turbo direct format conversion can be used
   * with current caps and if so, adjust $dec to enable it and $clrspc
   * accordingly. */
  /* NOTE(review): this probe runs after `format` was computed above from the
   * pre-probe format_convert state — presumably the updated state takes
   * effect on the next negotiation; confirm against upstream. */
  gst_jpeg_turbo_parse_ext_fmt_convert (dec, &clrspc);
#endif

  outstate =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
      width, height, dec->input_state);

  switch (clrspc) {
    case JCS_RGB:
    case JCS_GRAYSCALE:
      /* no chroma siting / colorimetry overrides needed */
      break;
    default:
      /* aka JPEG chroma siting */
      outstate->info.chroma_site = GST_VIDEO_CHROMA_SITE_NONE;

      /* JPEG YCbCr is full-range BT.601 */
      outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
      outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
      outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
      outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
      break;
  }

  if (interlaced) {
    outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
    GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
        GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
  }

  gst_video_codec_state_unref (outstate);

  gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));

  GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
  GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
}
1141
/* Read the JPEG header of the next image, validate that the stream can be
 * handled (component count, colorspace, sampling factors, dimensions) and
 * start the libjpeg decompression cycle.  On success, cinfo's output_*
 * fields describe the image about to be decoded.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR otherwise (an element
 * error/warning is posted via GST_VIDEO_DECODER_ERROR in the error paths).
 * Note: the libjpeg calls here may also longjmp out via the error handler
 * the caller armed with setjmp (see gst_jpeg_dec_handle_frame). */
static GstFlowReturn
gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
{
  G_GNUC_UNUSED GstFlowReturn ret;
  guint r_h, r_v, hdr_ok;

  /* read header */
  hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
  if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
    GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
  }

  GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
  GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);

  if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
    goto components_not_supported;

  /* sampling factors of the first (luma) component, used as reference
   * for the sanity checks below */
  r_h = dec->cinfo.comp_info[0].h_samp_factor;
  r_v = dec->cinfo.comp_info[0].v_samp_factor;

  GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);

  if (dec->cinfo.num_components > 3)
    goto components_not_supported;

  /* verify color space expectation to avoid going *boom* or bogus output */
  if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
      dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
      dec->cinfo.jpeg_color_space != JCS_RGB)
    goto unsupported_colorspace;

#ifndef GST_DISABLE_GST_DEBUG
  {
    gint i;

    for (i = 0; i < dec->cinfo.num_components; ++i) {
      GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
          i, dec->cinfo.comp_info[i].h_samp_factor,
          dec->cinfo.comp_info[i].v_samp_factor,
          dec->cinfo.comp_info[i].component_id);
    }
  }
#endif

  /* prepare for raw output */
  dec->cinfo.do_fancy_upsampling = FALSE;
  dec->cinfo.do_block_smoothing = FALSE;
  dec->cinfo.dct_method = dec->idct_method;
#ifdef JCS_EXTENSIONS
  /* if downstream prefers an RGB layout libjpeg-turbo can emit directly,
   * let the library do the colorspace conversion for us */
  gst_jpeg_turbo_parse_ext_fmt_convert (dec, NULL);
  if (dec->format_convert) {
    dec->cinfo.out_color_space = dec->libjpeg_ext_format;
    dec->cinfo.raw_data_out = FALSE;
  } else
#endif
  {
    dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
    dec->cinfo.raw_data_out = TRUE;
  }

  GST_LOG_OBJECT (dec, "starting decompress");
  guarantee_huff_tables (&dec->cinfo);
  if (!jpeg_start_decompress (&dec->cinfo)) {
    GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
  }

  /* sanity checks to get safe and reasonable output */
  switch (dec->cinfo.jpeg_color_space) {
    case JCS_GRAYSCALE:
      if (dec->cinfo.num_components != 1)
        goto invalid_yuvrgbgrayscale;
      break;
    case JCS_RGB:
      if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
          dec->cinfo.max_h_samp_factor > 1)
        goto invalid_yuvrgbgrayscale;
      break;
    case JCS_YCbCr:
      /* NOTE(review): the `r_v < comp_info[0].v_samp_factor` and
       * `r_h < comp_info[0].h_samp_factor` comparisons are trivially false
       * (r_v/r_h ARE comp_info[0]'s factors), and comp_info[2] is never
       * checked — possibly [1]/[2] were intended; confirm upstream before
       * changing. */
      if (dec->cinfo.num_components != 3 ||
          r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
          r_v < dec->cinfo.comp_info[1].v_samp_factor ||
          r_h < dec->cinfo.comp_info[0].h_samp_factor ||
          r_h < dec->cinfo.comp_info[1].h_samp_factor)
        goto invalid_yuvrgbgrayscale;
      break;
    default:
      /* unreachable: colorspace was validated above */
      g_assert_not_reached ();
      break;
  }

  if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
          dec->cinfo.output_width > MAX_WIDTH ||
          dec->cinfo.output_height < MIN_HEIGHT ||
          dec->cinfo.output_height > MAX_HEIGHT))
    goto wrong_size;

  return GST_FLOW_OK;

  /* ERRORS */
wrong_size:
  {
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
            dec->cinfo.output_height), ret);
    return GST_FLOW_ERROR;
  }
components_not_supported:
  {
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("number of components not supported: %d (max 3)",
            dec->cinfo.num_components), ret);
    jpeg_abort_decompress (&dec->cinfo);
    return GST_FLOW_ERROR;
  }
unsupported_colorspace:
  {
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture has unknown or unsupported colourspace"), ret);
    jpeg_abort_decompress (&dec->cinfo);
    return GST_FLOW_ERROR;
  }
invalid_yuvrgbgrayscale:
  {
    ret = GST_FLOW_ERROR;
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
    jpeg_abort_decompress (&dec->cinfo);
    return GST_FLOW_ERROR;
  }
}
1280
1281 static GstFlowReturn
gst_jpeg_dec_decode(GstJpegDec * dec,GstVideoFrame * vframe,guint width,guint height,guint field,guint num_fields)1282 gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
1283 guint height, guint field, guint num_fields)
1284 {
1285 GstFlowReturn ret = GST_FLOW_OK;
1286
1287 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1288 gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
1289 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1290 gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
1291 } else {
1292 GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
1293 dec->cinfo.rec_outbuf_height);
1294
1295 /* For some widths jpeglib requires more horizontal padding than I420
1296 * provides. In those cases we need to decode into separate buffers and then
1297 * copy over the data into our final picture buffer, otherwise jpeglib might
1298 * write over the end of a line into the beginning of the next line,
1299 * resulting in blocky artifacts on the left side of the picture. */
1300 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1301 || dec->cinfo.comp_info[0].h_samp_factor != 2
1302 || dec->cinfo.comp_info[1].h_samp_factor != 1
1303 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1304 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1305 "indirect decoding using extra buffer copy");
1306 gst_jpeg_dec_decode_indirect (dec, vframe,
1307 dec->cinfo.comp_info[0].v_samp_factor,
1308 dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
1309 field, num_fields);
1310 } else {
1311 ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
1312 }
1313 }
1314
1315 GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
1316
1317 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1318 jpeg_abort_decompress (&dec->cinfo);
1319 } else {
1320 jpeg_finish_decompress (&dec->cinfo);
1321 }
1322
1323 return ret;
1324 }
1325
/* GstVideoDecoder::handle_frame implementation.
 *
 * Maps the input buffer, appends a missing EOI marker if needed, reads and
 * validates the JPEG header, detects interlaced MJPEG (two JPEG fields in one
 * buffer), negotiates output caps, allocates the output frame and decodes one
 * or two fields into it.  libjpeg errors are handled via setjmp/longjmp: the
 * error handler longjmps back to the most recently armed setjmp below, which
 * is why setjmp is re-armed after each step that changes the cleanup needed.
 *
 * need_unmap/release_frame track which cleanup is still owed on the exit
 * paths; `done` falls through to `exit` which performs it. */
static GstFlowReturn
gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstJpegDec *dec = (GstJpegDec *) bdec;
  GstVideoFrame vframe;
  gint num_fields;              /* number of fields (1 or 2) */
  gint output_height;           /* height of output image (one or two fields) */
  gint height;                  /* height of current frame (whole image or a field) */
  gint width;
  guint code;                   /* libjpeg error code captured after longjmp */
  gboolean need_unmap = TRUE;   /* input buffer still mapped on exit? */
  GstVideoCodecState *state = NULL;
  gboolean release_frame = TRUE;        /* frame not yet finished/dropped? */
  gboolean has_eoi;
  guint8 *data;
  gsize nbytes;

  if (!gst_buffer_map (frame->input_buffer, &dec->current_frame_map,
          GST_MAP_READ))
    goto map_failed;

  data = dec->current_frame_map.data;
  nbytes = dec->current_frame_map.size;
  if (nbytes < 2)
    goto need_more_data;
  /* check the last two bytes for the 0xffd9 end-of-image marker */
  has_eoi = ((data[nbytes - 2] == 0xff) && (data[nbytes - 1] == 0xd9));

  /* some cameras fail to send an end-of-image marker (EOI),
   * add it if that is the case. */
  if (!has_eoi) {
    GstMapInfo map;
    GstBuffer *eoibuf = gst_buffer_new_and_alloc (2);

    /* unmap, will add EOI and remap at the end */
    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);

    gst_buffer_map (eoibuf, &map, GST_MAP_WRITE);
    map.data[0] = 0xff;
    map.data[1] = 0xd9;
    gst_buffer_unmap (eoibuf, &map);

    /* append to input buffer, and remap */
    frame->input_buffer = gst_buffer_append (frame->input_buffer, eoibuf);

    gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
    GST_DEBUG ("fixup EOI marker added");
  }

  /* feed the whole mapped buffer to the libjpeg source manager */
  dec->current_frame = frame;
  dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
  dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;

  /* arm error handling for header parsing; libjpeg longjmps here on error */
  if (setjmp (dec->jerr.setjmp_buffer)) {
    code = dec->jerr.pub.msg_code;

    if (code == JERR_INPUT_EOF) {
      GST_DEBUG ("jpeg input EOF error, we probably need more data");
      goto need_more_data;
    }
    goto decode_error;
  }

  /* read header and check values */
  ret = gst_jpeg_dec_prepare_decode (dec);
  if (G_UNLIKELY (ret == GST_FLOW_ERROR))
    goto done;

  width = dec->cinfo.output_width;
  height = dec->cinfo.output_height;

  /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
   * to see if there are two SOF markers in the packet to detect this) */
  if (gst_video_decoder_get_packetized (bdec) &&
      dec->input_state &&
      dec->input_state->info.height > height &&
      dec->input_state->info.height <= (height * 2)
      && dec->input_state->info.width == width) {
    GST_LOG_OBJECT (dec,
        "looks like an interlaced image: "
        "input width/height of %dx%d with JPEG frame width/height of %dx%d",
        dec->input_state->info.width, dec->input_state->info.height, width,
        height);
    output_height = dec->input_state->info.height;
    height = dec->input_state->info.height / 2;
    num_fields = 2;
    GST_LOG_OBJECT (dec, "field height=%d", height);
  } else {
    output_height = height;
    num_fields = 1;
  }

  gst_jpeg_dec_negotiate (dec, width, output_height,
      dec->cinfo.jpeg_color_space, num_fields == 2);

  state = gst_video_decoder_get_output_state (bdec);
  ret = gst_video_decoder_allocate_output_frame (bdec, frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto alloc_failed;

  if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
          GST_MAP_READWRITE))
    goto alloc_failed;

  /* re-arm: from now on vframe must be unmapped before bailing out */
  if (setjmp (dec->jerr.setjmp_buffer)) {
    code = dec->jerr.pub.msg_code;
    gst_video_frame_unmap (&vframe);
    goto decode_error;
  }

  GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
      num_fields);

  /* decode first (or only) field */
  ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    gst_video_frame_unmap (&vframe);
    goto decode_failed;
  }

  /* re-arm error handling for the second field */
  if (setjmp (dec->jerr.setjmp_buffer)) {
    code = dec->jerr.pub.msg_code;
    gst_video_frame_unmap (&vframe);
    goto decode_error;
  }

  /* decode second field if there is one */
  if (num_fields == 2) {
    GstVideoFormat field2_format;

    /* Checked above before setting num_fields to 2 */
    g_assert (dec->input_state != NULL);

    /* skip any chunk or padding bytes before the next SOI marker; both fields
     * are in one single buffer here, so direct access should be fine here */
    while (dec->jsrc.pub.bytes_in_buffer > 2 &&
        GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
      --dec->jsrc.pub.bytes_in_buffer;
      ++dec->jsrc.pub.next_input_byte;
    }

    if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
      GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
      /* FIXME: post a warning message here? */
      gst_video_frame_unmap (&vframe);
      goto decode_failed;
    }

    /* check if format has changed for the second field */
#ifdef JCS_EXTENSIONS
    if (dec->format_convert) {
      field2_format = dec->format;
    } else
#endif
    {
      switch (dec->cinfo.jpeg_color_space) {
        case JCS_RGB:
          field2_format = GST_VIDEO_FORMAT_RGB;
          break;
        case JCS_GRAYSCALE:
          field2_format = GST_VIDEO_FORMAT_GRAY8;
          break;
        default:
          field2_format = GST_VIDEO_FORMAT_I420;
          break;
      }
    }

    GST_LOG_OBJECT (dec,
        "got for second field of interlaced image: "
        "input width/height of %dx%d with JPEG frame width/height of %dx%d",
        dec->input_state->info.width, dec->input_state->info.height,
        dec->cinfo.output_width, dec->cinfo.output_height);

    /* both fields must agree with the negotiated output state */
    if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
        GST_VIDEO_INFO_HEIGHT (&state->info) <= dec->cinfo.output_height ||
        GST_VIDEO_INFO_HEIGHT (&state->info) > (dec->cinfo.output_height * 2) ||
        field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
      GST_WARNING_OBJECT (dec, "second field has different format than first");
      gst_video_frame_unmap (&vframe);
      goto decode_failed;
    }

    ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      gst_video_frame_unmap (&vframe);
      goto decode_failed;
    }
  }
  gst_video_frame_unmap (&vframe);

  /* frame ownership passes to finish_frame; input map no longer needed */
  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
  ret = gst_video_decoder_finish_frame (bdec, frame);
  release_frame = FALSE;
  need_unmap = FALSE;

done:

exit:

  if (need_unmap)
    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);

  if (release_frame)
    gst_video_decoder_release_frame (bdec, frame);

  if (state)
    gst_video_codec_state_unref (state);

  return ret;

  /* special cases */
need_more_data:
  {
    GST_LOG_OBJECT (dec, "we need more data");
    ret = GST_FLOW_OK;
    goto exit;
  }
  /* ERRORS */
map_failed:
  {
    GST_ELEMENT_ERROR (dec, RESOURCE, READ, (_("Failed to read memory")),
        ("gst_buffer_map() failed for READ access"));
    ret = GST_FLOW_ERROR;
    goto exit;
  }
decode_error:
  {
    gchar err_msg[JMSG_LENGTH_MAX];

    dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);

    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
            err_msg), ret);

    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
    gst_video_decoder_drop_frame (bdec, frame);
    release_frame = FALSE;
    need_unmap = FALSE;
    jpeg_abort_decompress (&dec->cinfo);

    goto done;
  }
decode_failed:
  {
    /* already posted an error message */
    goto done;
  }
alloc_failed:
  {
    const gchar *reason;

    reason = gst_flow_get_name (ret);

    GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
    /* Reset for next time */
    jpeg_abort_decompress (&dec->cinfo);
    if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
        ret != GST_FLOW_NOT_LINKED) {
      GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
          (_("Failed to decode JPEG image")),
          ("Buffer allocation failed, reason: %s", reason), ret);
      jpeg_abort_decompress (&dec->cinfo);
    }
    goto exit;
  }
}
1593
1594 static gboolean
gst_jpeg_dec_decide_allocation(GstVideoDecoder * bdec,GstQuery * query)1595 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1596 {
1597 GstBufferPool *pool = NULL;
1598 GstStructure *config;
1599
1600 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1601 return FALSE;
1602
1603 if (gst_query_get_n_allocation_pools (query) > 0)
1604 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1605
1606 if (pool == NULL)
1607 return FALSE;
1608
1609 config = gst_buffer_pool_get_config (pool);
1610 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1611 gst_buffer_pool_config_add_option (config,
1612 GST_BUFFER_POOL_OPTION_VIDEO_META);
1613 }
1614 gst_buffer_pool_set_config (pool, config);
1615 gst_object_unref (pool);
1616
1617 return TRUE;
1618 }
1619
1620 static gboolean
gst_jpeg_dec_sink_event(GstVideoDecoder * bdec,GstEvent * event)1621 gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
1622 {
1623 const GstSegment *segment;
1624
1625 if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
1626 goto done;
1627
1628 gst_event_parse_segment (event, &segment);
1629
1630 if (segment->format == GST_FORMAT_TIME)
1631 gst_video_decoder_set_packetized (bdec, TRUE);
1632 else
1633 gst_video_decoder_set_packetized (bdec, FALSE);
1634
1635 done:
1636 return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
1637 }
1638
1639 static gboolean
gst_jpeg_dec_start(GstVideoDecoder * bdec)1640 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1641 {
1642 GstJpegDec *dec = (GstJpegDec *) bdec;
1643
1644 #ifdef JCS_EXTENSIONS
1645 dec->format_convert = FALSE;
1646 #endif
1647 dec->saw_header = FALSE;
1648 dec->parse_entropy_len = 0;
1649 dec->parse_resync = FALSE;
1650
1651 gst_video_decoder_set_packetized (bdec, FALSE);
1652
1653 return TRUE;
1654 }
1655
1656 static gboolean
gst_jpeg_dec_flush(GstVideoDecoder * bdec)1657 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1658 {
1659 GstJpegDec *dec = (GstJpegDec *) bdec;
1660
1661 jpeg_abort_decompress (&dec->cinfo);
1662 dec->parse_entropy_len = 0;
1663 dec->parse_resync = FALSE;
1664 dec->saw_header = FALSE;
1665 #ifdef JCS_EXTENSIONS
1666 dec->format_convert = FALSE;
1667 #endif
1668
1669 return TRUE;
1670 }
1671
1672 static void
gst_jpeg_dec_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)1673 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1674 const GValue * value, GParamSpec * pspec)
1675 {
1676 GstJpegDec *dec;
1677
1678 dec = GST_JPEG_DEC (object);
1679
1680 switch (prop_id) {
1681 case PROP_IDCT_METHOD:
1682 dec->idct_method = g_value_get_enum (value);
1683 break;
1684 #ifndef GST_REMOVE_DEPRECATED
1685 case PROP_MAX_ERRORS:
1686 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1687 break;
1688 #endif
1689 default:
1690 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1691 break;
1692 }
1693 }
1694
1695 static void
gst_jpeg_dec_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)1696 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1697 GParamSpec * pspec)
1698 {
1699 GstJpegDec *dec;
1700
1701 dec = GST_JPEG_DEC (object);
1702
1703 switch (prop_id) {
1704 case PROP_IDCT_METHOD:
1705 g_value_set_enum (value, dec->idct_method);
1706 break;
1707 #ifndef GST_REMOVE_DEPRECATED
1708 case PROP_MAX_ERRORS:
1709 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1710 break;
1711 #endif
1712 default:
1713 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1714 break;
1715 }
1716 }
1717
1718 static gboolean
gst_jpeg_dec_stop(GstVideoDecoder * bdec)1719 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1720 {
1721 GstJpegDec *dec = (GstJpegDec *) bdec;
1722
1723 gst_jpeg_dec_free_buffers (dec);
1724
1725 g_free (dec->scratch);
1726 dec->scratch = NULL;
1727 dec->scratch_size = 0;
1728
1729 return TRUE;
1730 }
1731