1 /*
2 * Copyright (C) 2012 Collabora Ltd.
3 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
4 * Copyright (C) 2013 Sebastian Dröge <slomo@circular-chaos.org>
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
15 *
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
19 * Boston, MA 02110-1301, USA.
20 *
21 */
22
23 #ifdef HAVE_CONFIG_H
24 #include "config.h"
25 #endif
26
27 #include "gstopenjpegdec.h"
28
29
30 #include <string.h>
31
32 GST_DEBUG_CATEGORY_STATIC (gst_openjpeg_dec_debug);
33 #define GST_CAT_DEFAULT gst_openjpeg_dec_debug
34
35 static gboolean gst_openjpeg_dec_start (GstVideoDecoder * decoder);
36 static gboolean gst_openjpeg_dec_stop (GstVideoDecoder * decoder);
37 static gboolean gst_openjpeg_dec_set_format (GstVideoDecoder * decoder,
38 GstVideoCodecState * state);
39 static GstFlowReturn gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
40 GstVideoCodecFrame * frame);
41 static gboolean gst_openjpeg_dec_decide_allocation (GstVideoDecoder * decoder,
42 GstQuery * query);
43
/* Endianness-dependent raw video format names used in the src caps:
 * 16-bit grayscale and 10-bit YUV have _LE/_BE variants and must match the
 * host byte order, since the fill_frame functions write host-endian words. */
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GRAY16 "GRAY16_LE"
#define YUV10 "Y444_10LE, I422_10LE, I420_10LE"
#else
#define GRAY16 "GRAY16_BE"
#define YUV10 "Y444_10BE, I422_10BE, I420_10BE"
#endif

/* Sink: accepts JPEG2000 codestreams in three framings — image/x-j2c
 * (codestream wrapped in a jp2c box), image/x-jpc (bare codestream) and
 * image/jp2 (full JP2 file). The first two advertise a "sampling" field. */
static GstStaticPadTemplate gst_openjpeg_dec_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/x-j2c, "
        GST_JPEG2000_SAMPLING_LIST "; "
        "image/x-jpc, " GST_JPEG2000_SAMPLING_LIST "; " "image/jp2")
    );

/* Src: every raw format gst_openjpeg_dec_negotiate() can select. */
static GstStaticPadTemplate gst_openjpeg_dec_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ ARGB64, ARGB, xRGB, "
            "AYUV64, " YUV10 ", "
            "AYUV, Y444, Y42B, I420, Y41B, YUV9, " "GRAY8, " GRAY16 " }"))
    );

#define parent_class gst_openjpeg_dec_parent_class
G_DEFINE_TYPE (GstOpenJPEGDec, gst_openjpeg_dec, GST_TYPE_VIDEO_DECODER);
72
73 static void
gst_openjpeg_dec_class_init(GstOpenJPEGDecClass * klass)74 gst_openjpeg_dec_class_init (GstOpenJPEGDecClass * klass)
75 {
76 GstElementClass *element_class;
77 GstVideoDecoderClass *video_decoder_class;
78
79 element_class = (GstElementClass *) klass;
80 video_decoder_class = (GstVideoDecoderClass *) klass;
81
82 gst_element_class_add_static_pad_template (element_class,
83 &gst_openjpeg_dec_src_template);
84 gst_element_class_add_static_pad_template (element_class,
85 &gst_openjpeg_dec_sink_template);
86
87 gst_element_class_set_static_metadata (element_class,
88 "OpenJPEG JPEG2000 decoder",
89 "Codec/Decoder/Video",
90 "Decode JPEG2000 streams",
91 "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
92
93 video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_start);
94 video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_stop);
95 video_decoder_class->set_format =
96 GST_DEBUG_FUNCPTR (gst_openjpeg_dec_set_format);
97 video_decoder_class->handle_frame =
98 GST_DEBUG_FUNCPTR (gst_openjpeg_dec_handle_frame);
99 video_decoder_class->decide_allocation = gst_openjpeg_dec_decide_allocation;
100
101 GST_DEBUG_CATEGORY_INIT (gst_openjpeg_dec_debug, "openjpegdec", 0,
102 "OpenJPEG Decoder");
103 }
104
/* Instance initializer: configures base-class behaviour and default
 * decoder parameters. */
static void
gst_openjpeg_dec_init (GstOpenJPEGDec * self)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) self;

  /* Upstream delivers one complete JPEG2000 frame per buffer. */
  gst_video_decoder_set_packetized (decoder, TRUE);
  /* Caps (codec framing / sampling) must be known before decoding. */
  gst_video_decoder_set_needs_format (decoder, TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (self), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (self));
  /* Start from library defaults; refined per-stream in set_format(). */
  opj_set_default_decoder_parameters (&self->params);
#ifdef HAVE_OPENJPEG_1
  self->params.cp_limit_decoding = NO_LIMITATION;
#endif
  self->sampling = GST_JPEG2000_SAMPLING_NONE;
}
121
122 static gboolean
gst_openjpeg_dec_start(GstVideoDecoder * decoder)123 gst_openjpeg_dec_start (GstVideoDecoder * decoder)
124 {
125 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
126
127 GST_DEBUG_OBJECT (self, "Starting");
128
129 return TRUE;
130 }
131
132 static gboolean
gst_openjpeg_dec_stop(GstVideoDecoder * video_decoder)133 gst_openjpeg_dec_stop (GstVideoDecoder * video_decoder)
134 {
135 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (video_decoder);
136
137 GST_DEBUG_OBJECT (self, "Stopping");
138
139 if (self->output_state) {
140 gst_video_codec_state_unref (self->output_state);
141 self->output_state = NULL;
142 }
143
144 if (self->input_state) {
145 gst_video_codec_state_unref (self->input_state);
146 self->input_state = NULL;
147 }
148
149 GST_DEBUG_OBJECT (self, "Stopped");
150
151 return TRUE;
152 }
153
154 static gboolean
gst_openjpeg_dec_set_format(GstVideoDecoder * decoder,GstVideoCodecState * state)155 gst_openjpeg_dec_set_format (GstVideoDecoder * decoder,
156 GstVideoCodecState * state)
157 {
158 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
159 GstStructure *s;
160
161 GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);
162
163 s = gst_caps_get_structure (state->caps, 0);
164
165 self->color_space = OPJ_CLRSPC_UNKNOWN;
166
167 if (gst_structure_has_name (s, "image/jp2")) {
168 self->codec_format = OPJ_CODEC_JP2;
169 self->is_jp2c = FALSE;
170 } else if (gst_structure_has_name (s, "image/x-j2c")) {
171 self->codec_format = OPJ_CODEC_J2K;
172 self->is_jp2c = TRUE;
173 } else if (gst_structure_has_name (s, "image/x-jpc")) {
174 self->codec_format = OPJ_CODEC_J2K;
175 self->is_jp2c = FALSE;
176 } else {
177 g_return_val_if_reached (FALSE);
178 }
179
180
181 self->sampling =
182 gst_jpeg2000_sampling_from_string (gst_structure_get_string (s,
183 "sampling"));
184 if (gst_jpeg2000_sampling_is_rgb (self->sampling))
185 self->color_space = OPJ_CLRSPC_SRGB;
186 else if (gst_jpeg2000_sampling_is_mono (self->sampling))
187 self->color_space = OPJ_CLRSPC_GRAY;
188 else if (gst_jpeg2000_sampling_is_yuv (self->sampling))
189 self->color_space = OPJ_CLRSPC_SYCC;
190
191 self->ncomps = 0;
192 gst_structure_get_int (s, "num-components", &self->ncomps);
193
194 if (self->input_state)
195 gst_video_codec_state_unref (self->input_state);
196 self->input_state = gst_video_codec_state_ref (state);
197
198 return TRUE;
199 }
200
201 static gboolean
reverse_rgb_channels(GstJPEG2000Sampling sampling)202 reverse_rgb_channels (GstJPEG2000Sampling sampling)
203 {
204 return sampling == GST_JPEG2000_SAMPLING_BGR
205 || sampling == GST_JPEG2000_SAMPLING_BGRA;
206 }
207
/* Interleave four full-resolution 8-bit components into a packed
 * 4-bytes-per-pixel frame (ARGB/ABGR): component 3 (alpha) goes to byte 0,
 * components 0-2 to bytes 1-3. Assumes no chroma sub-sampling (enforced by
 * gst_openjpeg_dec_negotiate). */
static void
fill_frame_packed8_4 (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h, c;
  guint8 *data_out, *tmp;
  const gint *data_in[4];
  gint dstride;
  gint off[4];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 4; c++) {
    data_in[c] = image->comps[c].data;
    /* signed components are biased up by half the 8-bit range */
    off[c] = 0x80 * image->comps[c].sgnd;
  }

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      /* alpha, from 4'th input channel */
      tmp[0] = off[3] + *data_in[3];
      /* colour channels */
      tmp[1] = off[0] + *data_in[0];
      tmp[2] = off[1] + *data_in[1];
      tmp[3] = off[2] + *data_in[2];

      tmp += 4;
      data_in[0]++;
      data_in[1]++;
      data_in[2]++;
      data_in[3]++;
    }
    data_out += dstride;
  }
}
247
/* Interleave four full-resolution >8-bit components into packed ARGB64
 * (4 guint16 words per pixel); alpha (component 3) goes to word 0. Each
 * sample is left-shifted to scale the codestream precision up towards the
 * frame's component depth (shift clamped to [0, 8]). */
static void
fill_frame_packed16_4 (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h, c;
  guint16 *data_out, *tmp;
  const gint *data_in[4];
  gint dstride;
  gint shift[4], off[4];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  /* stride is in bytes; we index guint16 words */
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  for (c = 0; c < 4; c++) {
    data_in[c] = image->comps[c].data;
    /* signed components are biased up by half their value range */
    off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift[c] =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);
  }

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      /* alpha, from 4'th input channel */
      tmp[0] = off[3] + (*data_in[3] << shift[3]);
      /* colour channels */
      tmp[1] = off[0] + (*data_in[0] << shift[0]);
      tmp[2] = off[1] + (*data_in[1] << shift[1]);
      tmp[3] = off[2] + (*data_in[2] << shift[2]);

      tmp += 4;
      data_in[0]++;
      data_in[1]++;
      data_in[2]++;
      data_in[3]++;
    }
    data_out += dstride;
  }
}
290
291 static void
fill_frame_packed8_3(GstVideoFrame * frame,opj_image_t * image)292 fill_frame_packed8_3 (GstVideoFrame * frame, opj_image_t * image)
293 {
294 gint x, y, w, h, c;
295 guint8 *data_out, *tmp;
296 const gint *data_in[3];
297 gint dstride;
298 gint off[3];
299
300 w = GST_VIDEO_FRAME_WIDTH (frame);
301 h = GST_VIDEO_FRAME_HEIGHT (frame);
302 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
303 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
304
305 for (c = 0; c < 3; c++) {
306 data_in[c] = image->comps[c].data;
307 off[c] = 0x80 * image->comps[c].sgnd;
308 };
309
310 for (y = 0; y < h; y++) {
311 tmp = data_out;
312
313 for (x = 0; x < w; x++) {
314 tmp[0] = off[0] + *data_in[0];
315 tmp[1] = off[1] + *data_in[1];
316 tmp[2] = off[2] + *data_in[2];
317 data_in[0]++;
318 data_in[1]++;
319 data_in[2]++;
320 tmp += 3;
321 }
322 data_out += dstride;
323 }
324 }
325
326 static void
fill_frame_packed16_3(GstVideoFrame * frame,opj_image_t * image)327 fill_frame_packed16_3 (GstVideoFrame * frame, opj_image_t * image)
328 {
329 gint x, y, w, h, c;
330 guint16 *data_out, *tmp;
331 const gint *data_in[3];
332 gint dstride;
333 gint shift[3], off[3];
334
335 w = GST_VIDEO_FRAME_WIDTH (frame);
336 h = GST_VIDEO_FRAME_HEIGHT (frame);
337 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
338 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
339
340 for (c = 0; c < 3; c++) {
341 data_in[c] = image->comps[c].data;
342 off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
343 shift[c] =
344 MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
345 8), 0);
346 }
347
348 for (y = 0; y < h; y++) {
349 tmp = data_out;
350
351 for (x = 0; x < w; x++) {
352 tmp[1] = off[0] + (*data_in[0] << shift[0]);
353 tmp[2] = off[1] + (*data_in[1] << shift[1]);
354 tmp[3] = off[2] + (*data_in[2] << shift[2]);
355
356 tmp += 4;
357 data_in[0]++;
358 data_in[1]++;
359 data_in[2]++;
360 }
361 data_out += dstride;
362 }
363 }
364
/* for grayscale with alpha: expand (luma, alpha) to packed 8-bit ARGB —
 * alpha to byte 0, luma replicated into bytes 1-3. */
static void
fill_frame_packed8_2 (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h, c;
  guint8 *data_out, *tmp;
  const gint *data_in[2];
  gint dstride;
  gint off[2];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 2; c++) {
    data_in[c] = image->comps[c].data;
    /* signed components are biased up by half the 8-bit range */
    off[c] = 0x80 * image->comps[c].sgnd;
  };

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      /* alpha, from 2nd input channel */
      tmp[0] = off[1] + *data_in[1];
      /* luminance, from first input channel */
      tmp[1] = off[0] + *data_in[0];
      tmp[2] = tmp[1];
      tmp[3] = tmp[1];
      data_in[0]++;
      data_in[1]++;
      tmp += 4;
    }
    data_out += dstride;
  }
}
402
/* for grayscale with alpha: expand (luma, alpha) to packed ARGB64 —
 * alpha to word 0, luma replicated into words 1-3, each sample scaled
 * up to the frame's component depth (shift clamped to [0, 8]). */
static void
fill_frame_packed16_2 (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h, c;
  guint16 *data_out, *tmp;
  const gint *data_in[2];
  gint dstride;
  gint shift[2], off[2];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  /* stride is in bytes; we index guint16 words */
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  for (c = 0; c < 2; c++) {
    data_in[c] = image->comps[c].data;
    /* signed components are biased up by half their value range */
    off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift[c] =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);
  }

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      /* alpha, from 2nd input channel */
      tmp[0] = off[1] + (*data_in[1] << shift[1]);
      /* luminance, from first input channel */
      tmp[1] = off[0] + (*data_in[0] << shift[0]);
      tmp[2] = tmp[1];
      tmp[3] = tmp[1];
      tmp += 4;
      data_in[0]++;
      data_in[1]++;
    }
    data_out += dstride;
  }
}
443
444
445 static void
fill_frame_planar8_1(GstVideoFrame * frame,opj_image_t * image)446 fill_frame_planar8_1 (GstVideoFrame * frame, opj_image_t * image)
447 {
448 gint x, y, w, h;
449 guint8 *data_out, *tmp;
450 const gint *data_in;
451 gint dstride;
452 gint off;
453
454 w = GST_VIDEO_FRAME_WIDTH (frame);
455 h = GST_VIDEO_FRAME_HEIGHT (frame);
456 data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
457 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
458
459 data_in = image->comps[0].data;
460 off = 0x80 * image->comps[0].sgnd;
461
462 for (y = 0; y < h; y++) {
463 tmp = data_out;
464
465 for (x = 0; x < w; x++) {
466 *tmp = off + *data_in;
467
468 tmp++;
469 data_in++;
470 }
471 data_out += dstride;
472 }
473 }
474
/* Copy a single full-resolution >8-bit component into a one-plane 16-bit
 * frame (GRAY16), scaling precision up to the frame depth (shift clamped
 * to [0, 8]). */
static void
fill_frame_planar16_1 (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h;
  guint16 *data_out, *tmp;
  const gint *data_in;
  gint dstride;
  gint shift, off;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  /* stride is in bytes; we index guint16 words */
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  data_in = image->comps[0].data;

  /* signed samples are biased up by half their value range */
  off = (1 << (image->comps[0].prec - 1)) * image->comps[0].sgnd;
  shift =
      MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, 0) - image->comps[0].prec,
          8), 0);

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      *tmp = off + (*data_in << shift);

      tmp++;
      data_in++;
    }
    data_out += dstride;
  }
}
508
509 static void
fill_frame_planar8_3(GstVideoFrame * frame,opj_image_t * image)510 fill_frame_planar8_3 (GstVideoFrame * frame, opj_image_t * image)
511 {
512 gint c, x, y, w, h;
513 guint8 *data_out, *tmp;
514 const gint *data_in;
515 gint dstride, off;
516
517 for (c = 0; c < 3; c++) {
518 w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
519 h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
520 dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c);
521 data_out = GST_VIDEO_FRAME_COMP_DATA (frame, c);
522 data_in = image->comps[c].data;
523 off = 0x80 * image->comps[c].sgnd;
524
525 for (y = 0; y < h; y++) {
526 tmp = data_out;
527
528 for (x = 0; x < w; x++) {
529 *tmp = off + *data_in;
530 tmp++;
531 data_in++;
532 }
533 data_out += dstride;
534 }
535 }
536 }
537
/* Copy three >8-bit components into a planar 16-bit frame (e.g. the
 * 10-bit Y444/I422/I420 variants), each plane at its own resolution,
 * scaling precision up to the frame depth (shift clamped to [0, 8]). */
static void
fill_frame_planar16_3 (GstVideoFrame * frame, opj_image_t * image)
{
  gint c, x, y, w, h;
  guint16 *data_out, *tmp;
  const gint *data_in;
  gint dstride;
  gint shift, off;

  for (c = 0; c < 3; c++) {
    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
    /* stride is in bytes; we index guint16 words */
    dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c) / 2;
    data_out = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, c);
    data_in = image->comps[c].data;
    /* signed samples are biased up by half their value range */
    off = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);

    for (y = 0; y < h; y++) {
      tmp = data_out;

      for (x = 0; x < w; x++) {
        *tmp = off + (*data_in << shift);
        tmp++;
        data_in++;
      }
      data_out += dstride;
    }
  }
}
570
/* Fallback for arbitrary chroma sub-sampling: upsample three 8-bit
 * components into packed AYUV with a fixed opaque alpha, using nearest
 * sample lookup per output pixel. */
static void
fill_frame_planar8_3_generic (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h, c;
  guint8 *data_out, *tmp;
  const gint *data_in[3];
  gint dstride;
  gint dx[3], dy[3], off[3];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 3; c++) {
    data_in[c] = image->comps[c].data;
    dx[c] = image->comps[c].dx;
    dy[c] = image->comps[c].dy;
    /* signed samples are biased up by half the 8-bit range */
    off[c] = 0x80 * image->comps[c].sgnd;
  }

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      /* opaque alpha */
      tmp[0] = 0xff;
      /* NOTE(review): the sub-sampled index ((y/dy)*w + x)/dx assumes w is
       * divisible by dx — confirm against the component plane width. */
      tmp[1] = off[0] + data_in[0][((y / dy[0]) * w + x) / dx[0]];
      tmp[2] = off[1] + data_in[1][((y / dy[1]) * w + x) / dx[1]];
      tmp[3] = off[2] + data_in[2][((y / dy[2]) * w + x) / dx[2]];
      tmp += 4;
    }
    data_out += dstride;
  }
}
605
/* Fallback for arbitrary chroma sub-sampling with alpha: upsample four
 * 8-bit components into packed AYUV; component 3 supplies the alpha. */
static void
fill_frame_planar8_4_generic (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h, c;
  guint8 *data_out, *tmp;
  const gint *data_in[4];
  gint dstride;
  gint dx[4], dy[4], off[4];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 4; c++) {
    data_in[c] = image->comps[c].data;
    dx[c] = image->comps[c].dx;
    dy[c] = image->comps[c].dy;
    /* signed samples are biased up by half the 8-bit range */
    off[c] = 0x80 * image->comps[c].sgnd;
  }

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      /* alpha from component 3, colour from components 0-2 */
      tmp[0] = off[3] + data_in[3][((y / dy[3]) * w + x) / dx[3]];
      tmp[1] = off[0] + data_in[0][((y / dy[0]) * w + x) / dx[0]];
      tmp[2] = off[1] + data_in[1][((y / dy[1]) * w + x) / dx[1]];
      tmp[3] = off[2] + data_in[2][((y / dy[2]) * w + x) / dx[2]];
      tmp += 4;
    }
    data_out += dstride;
  }
}
640
641 static void
fill_frame_planar16_3_generic(GstVideoFrame * frame,opj_image_t * image)642 fill_frame_planar16_3_generic (GstVideoFrame * frame, opj_image_t * image)
643 {
644 gint x, y, w, h, c;
645 guint16 *data_out, *tmp;
646 const gint *data_in[3];
647 gint dstride;
648 gint dx[3], dy[3], shift[3], off[3];
649
650 w = GST_VIDEO_FRAME_WIDTH (frame);
651 h = GST_VIDEO_FRAME_HEIGHT (frame);
652 data_out = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
653 dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
654
655 for (c = 0; c < 3; c++) {
656 dx[c] = image->comps[c].dx;
657 dy[c] = image->comps[c].dy;
658 data_in[c] = image->comps[c].data;
659 off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
660 shift[c] =
661 MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
662 8), 0);
663 }
664
665 for (y = 0; y < h; y++) {
666 tmp = data_out;
667
668 for (x = 0; x < w; x++) {
669 tmp[0] = 0xff;
670 tmp[1] = off[0] + (data_in[0][((y / dy[0]) * w + x) / dx[0]] << shift[0]);
671 tmp[2] = off[1] + (data_in[1][((y / dy[1]) * w + x) / dx[1]] << shift[1]);
672 tmp[3] = off[2] + (data_in[2][((y / dy[2]) * w + x) / dx[2]] << shift[2]);
673 tmp += 4;
674 }
675 data_out += dstride;
676 }
677 }
678
/* Fallback for arbitrary chroma sub-sampling at >8 bits with alpha:
 * upsample four components into packed AYUV64; component 3 supplies the
 * alpha word. */
static void
fill_frame_planar16_4_generic (GstVideoFrame * frame, opj_image_t * image)
{
  gint x, y, w, h, c;
  guint16 *data_out, *tmp;
  const gint *data_in[4];
  gint dstride;
  gint dx[4], dy[4], shift[4], off[4];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_out = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  /* stride is in bytes; we index guint16 words */
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  for (c = 0; c < 4; c++) {
    dx[c] = image->comps[c].dx;
    dy[c] = image->comps[c].dy;
    data_in[c] = image->comps[c].data;
    /* signed samples are biased up by half their value range */
    off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift[c] =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);
  }

  for (y = 0; y < h; y++) {
    tmp = data_out;

    for (x = 0; x < w; x++) {
      /* alpha from component 3, colour from components 0-2 */
      tmp[0] = off[3] + (data_in[3][((y / dy[3]) * w + x) / dx[3]] << shift[3]);
      tmp[1] = off[0] + (data_in[0][((y / dy[0]) * w + x) / dx[0]] << shift[0]);
      tmp[2] = off[1] + (data_in[1][((y / dy[1]) * w + x) / dx[1]] << shift[1]);
      tmp[3] = off[2] + (data_in[2][((y / dy[2]) * w + x) / dx[2]] << shift[2]);
      tmp += 4;
    }
    data_out += dstride;
  }
}
716
717 static gint
get_highest_prec(opj_image_t * image)718 get_highest_prec (opj_image_t * image)
719 {
720 gint i;
721 gint ret = 0;
722
723 for (i = 0; i < image->numcomps; i++)
724 ret = MAX (image->comps[i].prec, ret);
725
726 return ret;
727 }
728
729
/* Map the decoded opj_image_t layout (colorspace, component count,
 * sub-sampling, bit depth) to a GstVideoFormat, select the matching
 * fill_frame implementation, and (re)negotiate downstream caps whenever
 * the output format, width or height changed.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED for unsupported
 * layouts or failed caps negotiation. */
static GstFlowReturn
gst_openjpeg_dec_negotiate (GstOpenJPEGDec * self, opj_image_t * image)
{
  GstVideoFormat format;
  gint width, height;

  /* Fall back to the colorspace hinted by the sink caps when the
   * codestream itself does not declare one. */
  if (image->color_space == OPJ_CLRSPC_UNKNOWN || image->color_space == 0)
    image->color_space = self->color_space;

  switch (image->color_space) {
    case OPJ_CLRSPC_SRGB:
      if (image->numcomps == 4) {
        /* RGBA: all four components must be full resolution */
        if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||
            image->comps[1].dx != 1 || image->comps[1].dy != 1 ||
            image->comps[2].dx != 1 || image->comps[2].dy != 1 ||
            image->comps[3].dx != 1 || image->comps[3].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling for RGBA not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_packed8_4;
          /* BGR-order sampling swaps R and B in the output format */
          format =
              reverse_rgb_channels (self->sampling) ? GST_VIDEO_FORMAT_ABGR :
              GST_VIDEO_FORMAT_ARGB;

        } else if (get_highest_prec (image) <= 16) {
          self->fill_frame = fill_frame_packed16_4;
          format = GST_VIDEO_FORMAT_ARGB64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d", image->comps[3].prec);
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else if (image->numcomps == 3) {
        /* RGB: all three components must be full resolution */
        if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||
            image->comps[1].dx != 1 || image->comps[1].dy != 1 ||
            image->comps[2].dx != 1 || image->comps[2].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling for RGB not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_packed8_3;
          format =
              reverse_rgb_channels (self->sampling) ? GST_VIDEO_FORMAT_BGR :
              GST_VIDEO_FORMAT_RGB;
        } else if (get_highest_prec (image) <= 16) {
          /* >8-bit RGB is widened into ARGB64 */
          self->fill_frame = fill_frame_packed16_3;
          format = GST_VIDEO_FORMAT_ARGB64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else {
        GST_ERROR_OBJECT (self, "Unsupported number of RGB components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }
      break;
    case OPJ_CLRSPC_GRAY:
      if (image->numcomps == 1) {
        /* NOTE(review): this check uses && unlike the || checks in the RGB
         * cases, so e.g. dx=2,dy=1 slips through to fill functions that do
         * not handle sub-sampling — confirm whether this is intended. */
        if (image->comps[0].dx != 1 && image->comps[0].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling for GRAY not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_planar8_1;
          format = GST_VIDEO_FORMAT_GRAY8;
        } else if (get_highest_prec (image) <= 16) {
          self->fill_frame = fill_frame_planar16_1;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
          format = GST_VIDEO_FORMAT_GRAY16_LE;
#else
          format = GST_VIDEO_FORMAT_GRAY16_BE;
#endif
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else if (image->numcomps == 2) {
        /* grayscale plus alpha — expanded into (A)RGB formats */
        if ((image->comps[0].dx != 1 && image->comps[0].dy != 1) ||
            (image->comps[1].dx != 1 && image->comps[1].dy != 1)) {
          GST_ERROR_OBJECT (self, "Sub-sampling for GRAY not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }
        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_packed8_2;
          format = GST_VIDEO_FORMAT_ARGB;
        } else if (get_highest_prec (image) <= 16) {
          self->fill_frame = fill_frame_packed16_2;
          format = GST_VIDEO_FORMAT_ARGB64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else {
        GST_ERROR_OBJECT (self, "Unsupported number of GRAY components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }
      break;
    case OPJ_CLRSPC_SYCC:
      if (image->numcomps != 3 && image->numcomps != 4) {
        GST_ERROR_OBJECT (self, "Unsupported number of YUV components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }

      /* luma must be full resolution */
      if (image->comps[0].dx != 1 || image->comps[0].dy != 1) {
        GST_ERROR_OBJECT (self, "Sub-sampling of luma plane not supported");
        return GST_FLOW_NOT_NEGOTIATED;
      }

      /* both chroma planes must share one sub-sampling factor */
      if (image->comps[1].dx != image->comps[2].dx ||
          image->comps[1].dy != image->comps[2].dy) {
        GST_ERROR_OBJECT (self,
            "Different sub-sampling of chroma planes not supported");
        /* NOTE(review): every other failure here returns
         * GST_FLOW_NOT_NEGOTIATED — confirm GST_FLOW_ERROR is intended. */
        return GST_FLOW_ERROR;
      }

      if (image->numcomps == 4) {
        if (image->comps[3].dx != 1 || image->comps[3].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling of alpha plane not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_planar8_4_generic;
          format = GST_VIDEO_FORMAT_AYUV;
        } else if (image->comps[3].prec <= 16) {
          self->fill_frame = fill_frame_planar16_4_generic;
          format = GST_VIDEO_FORMAT_AYUV64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d", image->comps[0].prec);
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else if (image->numcomps == 3) {
        if (get_highest_prec (image) == 8) {
          /* common chroma sub-sampling factors map to native planar
           * formats; anything else goes through the generic AYUV path */
          if (image->comps[1].dx == 1 && image->comps[1].dy == 1) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_Y444;
          } else if (image->comps[1].dx == 2 && image->comps[1].dy == 1) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_Y42B;
          } else if (image->comps[1].dx == 2 && image->comps[1].dy == 2) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_I420;
          } else if (image->comps[1].dx == 4 && image->comps[1].dy == 1) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_Y41B;
          } else if (image->comps[1].dx == 4 && image->comps[1].dy == 4) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_YUV9;
          } else {
            self->fill_frame = fill_frame_planar8_3_generic;
            format = GST_VIDEO_FORMAT_AYUV;
          }
        } else if (get_highest_prec (image) <= 16) {
          /* only uniform 10-bit streams map to native 10-bit formats */
          if (image->comps[0].prec == 10 &&
              image->comps[1].prec == 10 && image->comps[2].prec == 10) {
            if (image->comps[1].dx == 1 && image->comps[1].dy == 1) {
              self->fill_frame = fill_frame_planar16_3;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
              format = GST_VIDEO_FORMAT_Y444_10LE;
#else
              format = GST_VIDEO_FORMAT_Y444_10BE;
#endif
            } else if (image->comps[1].dx == 2 && image->comps[1].dy == 1) {
              self->fill_frame = fill_frame_planar16_3;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
              format = GST_VIDEO_FORMAT_I422_10LE;
#else
              format = GST_VIDEO_FORMAT_I422_10BE;
#endif
            } else if (image->comps[1].dx == 2 && image->comps[1].dy == 2) {
              self->fill_frame = fill_frame_planar16_3;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
              format = GST_VIDEO_FORMAT_I420_10LE;
#else
              format = GST_VIDEO_FORMAT_I420_10BE;
#endif
            } else {
              self->fill_frame = fill_frame_planar16_3_generic;
              format = GST_VIDEO_FORMAT_AYUV64;
            }
          } else {
            self->fill_frame = fill_frame_planar16_3_generic;
            format = GST_VIDEO_FORMAT_AYUV64;
          }
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else {
        GST_ERROR_OBJECT (self, "Unsupported number of YUV components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }
      break;
    default:
      GST_ERROR_OBJECT (self, "Unsupported colorspace %d", image->color_space);
      return GST_FLOW_NOT_NEGOTIATED;
  }

  width = image->x1 - image->x0;
  height = image->y1 - image->y0;

  /* only renegotiate when format or dimensions actually changed */
  if (!self->output_state ||
      self->output_state->info.finfo->format != format ||
      self->output_state->info.width != width ||
      self->output_state->info.height != height) {
    if (self->output_state)
      gst_video_codec_state_unref (self->output_state);
    self->output_state =
        gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self), format,
        width, height, self->input_state);

    if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self)))
      return GST_FLOW_NOT_NEGOTIATED;
  }

  return GST_FLOW_OK;
}
958
959 static void
gst_openjpeg_dec_opj_error(const char * msg,void * userdata)960 gst_openjpeg_dec_opj_error (const char *msg, void *userdata)
961 {
962 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
963 gchar *trimmed = g_strchomp (g_strdup (msg));
964 GST_TRACE_OBJECT (self, "openjpeg error: %s", trimmed);
965 g_free (trimmed);
966 }
967
968 static void
gst_openjpeg_dec_opj_warning(const char * msg,void * userdata)969 gst_openjpeg_dec_opj_warning (const char *msg, void *userdata)
970 {
971 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
972 gchar *trimmed = g_strchomp (g_strdup (msg));
973 GST_TRACE_OBJECT (self, "openjpeg warning: %s", trimmed);
974 g_free (trimmed);
975 }
976
977 static void
gst_openjpeg_dec_opj_info(const char * msg,void * userdata)978 gst_openjpeg_dec_opj_info (const char *msg, void *userdata)
979 {
980 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
981 gchar *trimmed = g_strchomp (g_strdup (msg));
982 GST_TRACE_OBJECT (self, "openjpeg info: %s", trimmed);
983 g_free (trimmed);
984 }
985
#ifndef HAVE_OPENJPEG_1
/* In-memory stream state for the OpenJPEG 2.x stream callbacks below:
 * data/size describe the mapped input buffer, offset is the read cursor. */
typedef struct
{
  guint8 *data;
  guint offset, size;
} MemStream;
992
993 static OPJ_SIZE_T
read_fn(void * p_buffer,OPJ_SIZE_T p_nb_bytes,void * p_user_data)994 read_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
995 {
996 MemStream *mstream = p_user_data;
997 OPJ_SIZE_T read;
998
999 if (mstream->offset == mstream->size)
1000 return -1;
1001
1002 if (mstream->offset + p_nb_bytes > mstream->size)
1003 read = mstream->size - mstream->offset;
1004 else
1005 read = p_nb_bytes;
1006
1007 memcpy (p_buffer, mstream->data + mstream->offset, read);
1008 mstream->offset += read;
1009
1010 return read;
1011 }
1012
/* OpenJPEG write callback: the decoder stream is read-only, so any write
 * is a programming error — warn and report failure. */
static OPJ_SIZE_T
write_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
{
  g_return_val_if_reached (-1);
}
1018
1019 static OPJ_OFF_T
skip_fn(OPJ_OFF_T p_nb_bytes,void * p_user_data)1020 skip_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
1021 {
1022 MemStream *mstream = p_user_data;
1023 OPJ_OFF_T skip;
1024
1025 if (mstream->offset + p_nb_bytes > mstream->size)
1026 skip = mstream->size - mstream->offset;
1027 else
1028 skip = p_nb_bytes;
1029
1030 mstream->offset += skip;
1031
1032 return skip;
1033 }
1034
1035 static OPJ_BOOL
seek_fn(OPJ_OFF_T p_nb_bytes,void * p_user_data)1036 seek_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
1037 {
1038 MemStream *mstream = p_user_data;
1039
1040 if (p_nb_bytes > mstream->size)
1041 return OPJ_FALSE;
1042
1043 mstream->offset = p_nb_bytes;
1044
1045 return OPJ_TRUE;
1046 }
1047 #endif
1048
1049 static GstFlowReturn
gst_openjpeg_dec_handle_frame(GstVideoDecoder * decoder,GstVideoCodecFrame * frame)1050 gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
1051 GstVideoCodecFrame * frame)
1052 {
1053 GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
1054 GstFlowReturn ret = GST_FLOW_OK;
1055 gint64 deadline;
1056 GstMapInfo map;
1057 #ifdef HAVE_OPENJPEG_1
1058 opj_dinfo_t *dec;
1059 opj_cio_t *io;
1060 #else
1061 opj_codec_t *dec;
1062 opj_stream_t *stream;
1063 MemStream mstream;
1064 #endif
1065 opj_image_t *image;
1066 GstVideoFrame vframe;
1067 opj_dparameters_t params;
1068
1069 GST_DEBUG_OBJECT (self, "Handling frame");
1070
1071 deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
1072 if (deadline < 0) {
1073 GST_LOG_OBJECT (self, "Dropping too late frame: deadline %" G_GINT64_FORMAT,
1074 deadline);
1075 ret = gst_video_decoder_drop_frame (decoder, frame);
1076 return ret;
1077 }
1078
1079 dec = opj_create_decompress (self->codec_format);
1080 if (!dec)
1081 goto initialization_error;
1082
1083 #ifdef HAVE_OPENJPEG_1
1084 if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
1085 GST_LEVEL_TRACE)) {
1086 opj_event_mgr_t callbacks;
1087
1088 callbacks.error_handler = gst_openjpeg_dec_opj_error;
1089 callbacks.warning_handler = gst_openjpeg_dec_opj_warning;
1090 callbacks.info_handler = gst_openjpeg_dec_opj_info;
1091 opj_set_event_mgr ((opj_common_ptr) dec, &callbacks, self);
1092 } else {
1093 opj_set_event_mgr ((opj_common_ptr) dec, NULL, NULL);
1094 }
1095 #else
1096 if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
1097 GST_LEVEL_TRACE)) {
1098 opj_set_info_handler (dec, gst_openjpeg_dec_opj_info, self);
1099 opj_set_warning_handler (dec, gst_openjpeg_dec_opj_warning, self);
1100 opj_set_error_handler (dec, gst_openjpeg_dec_opj_error, self);
1101 } else {
1102 opj_set_info_handler (dec, NULL, NULL);
1103 opj_set_warning_handler (dec, NULL, NULL);
1104 opj_set_error_handler (dec, NULL, NULL);
1105 }
1106 #endif
1107
1108 params = self->params;
1109 if (self->ncomps)
1110 params.jpwl_exp_comps = self->ncomps;
1111 opj_setup_decoder (dec, ¶ms);
1112
1113 if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
1114 goto map_read_error;
1115
1116 if (self->is_jp2c && map.size < 8)
1117 goto open_error;
1118
1119 #ifdef HAVE_OPENJPEG_1
1120 io = opj_cio_open ((opj_common_ptr) dec, map.data + (self->is_jp2c ? 8 : 0),
1121 map.size - (self->is_jp2c ? 8 : 0));
1122 if (!io)
1123 goto open_error;
1124
1125 image = opj_decode (dec, io);
1126 if (!image)
1127 goto decode_error;
1128 #else
1129 stream = opj_stream_create (4096, OPJ_TRUE);
1130 if (!stream)
1131 goto open_error;
1132
1133 mstream.data = map.data + (self->is_jp2c ? 8 : 0);
1134 mstream.offset = 0;
1135 mstream.size = map.size - (self->is_jp2c ? 8 : 0);
1136
1137 opj_stream_set_read_function (stream, read_fn);
1138 opj_stream_set_write_function (stream, write_fn);
1139 opj_stream_set_skip_function (stream, skip_fn);
1140 opj_stream_set_seek_function (stream, seek_fn);
1141 opj_stream_set_user_data (stream, &mstream, NULL);
1142 opj_stream_set_user_data_length (stream, mstream.size);
1143
1144 image = NULL;
1145 if (!opj_read_header (stream, dec, &image))
1146 goto decode_error;
1147
1148 if (!opj_decode (dec, stream, image))
1149 goto decode_error;
1150 #endif
1151
1152 {
1153 gint i;
1154
1155 for (i = 0; i < image->numcomps; i++) {
1156 if (image->comps[i].data == NULL)
1157 goto decode_error;
1158 }
1159 }
1160
1161 gst_buffer_unmap (frame->input_buffer, &map);
1162
1163 ret = gst_openjpeg_dec_negotiate (self, image);
1164 if (ret != GST_FLOW_OK)
1165 goto negotiate_error;
1166
1167 ret = gst_video_decoder_allocate_output_frame (decoder, frame);
1168 if (ret != GST_FLOW_OK)
1169 goto allocate_error;
1170
1171 if (!gst_video_frame_map (&vframe, &self->output_state->info,
1172 frame->output_buffer, GST_MAP_WRITE))
1173 goto map_write_error;
1174
1175 self->fill_frame (&vframe, image);
1176
1177 gst_video_frame_unmap (&vframe);
1178
1179 #ifdef HAVE_OPENJPEG_1
1180 opj_cio_close (io);
1181 opj_image_destroy (image);
1182 opj_destroy_decompress (dec);
1183 #else
1184 opj_end_decompress (dec, stream);
1185 opj_stream_destroy (stream);
1186 opj_image_destroy (image);
1187 opj_destroy_codec (dec);
1188 #endif
1189
1190 ret = gst_video_decoder_finish_frame (decoder, frame);
1191
1192 return ret;
1193
1194 initialization_error:
1195 {
1196 gst_video_codec_frame_unref (frame);
1197 GST_ELEMENT_ERROR (self, LIBRARY, INIT,
1198 ("Failed to initialize OpenJPEG decoder"), (NULL));
1199 return GST_FLOW_ERROR;
1200 }
1201 map_read_error:
1202 {
1203 #ifdef HAVE_OPENJPEG_1
1204 opj_destroy_decompress (dec);
1205 #else
1206 opj_destroy_codec (dec);
1207 #endif
1208 gst_video_codec_frame_unref (frame);
1209
1210 GST_ELEMENT_ERROR (self, CORE, FAILED,
1211 ("Failed to map input buffer"), (NULL));
1212 return GST_FLOW_ERROR;
1213 }
1214 open_error:
1215 {
1216 #ifdef HAVE_OPENJPEG_1
1217 opj_destroy_decompress (dec);
1218 #else
1219 opj_destroy_codec (dec);
1220 #endif
1221 gst_buffer_unmap (frame->input_buffer, &map);
1222 gst_video_codec_frame_unref (frame);
1223
1224 GST_ELEMENT_ERROR (self, LIBRARY, INIT,
1225 ("Failed to open OpenJPEG stream"), (NULL));
1226 return GST_FLOW_ERROR;
1227 }
1228 decode_error:
1229 {
1230 if (image)
1231 opj_image_destroy (image);
1232 #ifdef HAVE_OPENJPEG_1
1233 opj_cio_close (io);
1234 opj_destroy_decompress (dec);
1235 #else
1236 opj_stream_destroy (stream);
1237 opj_destroy_codec (dec);
1238 #endif
1239 gst_buffer_unmap (frame->input_buffer, &map);
1240 gst_video_codec_frame_unref (frame);
1241
1242 GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
1243 ("Failed to decode OpenJPEG stream"), (NULL), ret);
1244 return ret;
1245 }
1246 negotiate_error:
1247 {
1248 opj_image_destroy (image);
1249 #ifdef HAVE_OPENJPEG_1
1250 opj_cio_close (io);
1251 opj_destroy_decompress (dec);
1252 #else
1253 opj_stream_destroy (stream);
1254 opj_destroy_codec (dec);
1255 #endif
1256 gst_video_codec_frame_unref (frame);
1257
1258 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
1259 ("Failed to negotiate"), (NULL));
1260 return ret;
1261 }
1262 allocate_error:
1263 {
1264 opj_image_destroy (image);
1265 #ifdef HAVE_OPENJPEG_1
1266 opj_cio_close (io);
1267 opj_destroy_decompress (dec);
1268 #else
1269 opj_stream_destroy (stream);
1270 opj_destroy_codec (dec);
1271 #endif
1272 gst_video_codec_frame_unref (frame);
1273
1274 GST_ELEMENT_ERROR (self, CORE, FAILED,
1275 ("Failed to allocate output buffer"), (NULL));
1276 return ret;
1277 }
1278 map_write_error:
1279 {
1280 opj_image_destroy (image);
1281 #ifdef HAVE_OPENJPEG_1
1282 opj_cio_close (io);
1283 opj_destroy_decompress (dec);
1284 #else
1285 opj_stream_destroy (stream);
1286 opj_destroy_codec (dec);
1287 #endif
1288 gst_video_codec_frame_unref (frame);
1289
1290 GST_ELEMENT_ERROR (self, CORE, FAILED,
1291 ("Failed to map output buffer"), (NULL));
1292 return GST_FLOW_ERROR;
1293 }
1294 }
1295
1296 static gboolean
gst_openjpeg_dec_decide_allocation(GstVideoDecoder * decoder,GstQuery * query)1297 gst_openjpeg_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
1298 {
1299 GstBufferPool *pool;
1300 GstStructure *config;
1301
1302 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
1303 query))
1304 return FALSE;
1305
1306 g_assert (gst_query_get_n_allocation_pools (query) > 0);
1307 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1308 g_assert (pool != NULL);
1309
1310 config = gst_buffer_pool_get_config (pool);
1311 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1312 gst_buffer_pool_config_add_option (config,
1313 GST_BUFFER_POOL_OPTION_VIDEO_META);
1314 }
1315 gst_buffer_pool_set_config (pool, config);
1316 gst_object_unref (pool);
1317
1318 return TRUE;
1319 }
1320