/*
 * Copyright (C) 2012 Collabora Ltd.
 *     Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
 * Copyright (C) 2013 Sebastian Dröge <slomo@circular-chaos.org>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

/**
 * SECTION:element-openjpegdec
 * @title: openjpegdec
 * @see_also: openjpegenc
 *
 * openjpegdec decodes JPEG2000 streams using the OpenJPEG library.
 *
 * ## Example launch lines
 * |[
 * gst-launch-1.0 -v videotestsrc num-buffers=10 ! openjpegenc ! jpeg2000parse ! openjpegdec ! videoconvert ! autovideosink sync=false
 * ]| Encode and decode whole frames.
 * |[
 * gst-launch-1.0 -v videotestsrc num-buffers=10 ! openjpegenc num-threads=8 num-stripes=8 ! jpeg2000parse ! openjpegdec max-slice-threads=8 ! videoconvert ! autovideosink sync=false
 * ]| Encode and decode frames split into stripes.
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstopenjpegdec.h"


#include <string.h>

GST_DEBUG_CATEGORY_STATIC (gst_openjpeg_dec_debug);
#define GST_CAT_DEFAULT gst_openjpeg_dec_debug

enum
{
  PROP_0,
  PROP_MAX_THREADS,
  PROP_MAX_SLICE_THREADS,
  PROP_LAST
};

#define GST_OPENJPEG_DEC_DEFAULT_MAX_THREADS	0

/* prototypes */
static void gst_openjpeg_dec_finalize (GObject * object);

static GstStateChangeReturn
gst_openjpeg_dec_change_state (GstElement * element, GstStateChange transition);

static gboolean gst_openjpeg_dec_start (GstVideoDecoder * decoder);
static gboolean gst_openjpeg_dec_stop (GstVideoDecoder * decoder);
static gboolean gst_openjpeg_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state);
static gboolean gst_openjpeg_dec_flush (GstVideoDecoder * decoder);
static gboolean gst_openjpeg_dec_finish (GstVideoDecoder * decoder);
static GstFlowReturn gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame);
static gboolean gst_openjpeg_dec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query);
static void gst_openjpeg_dec_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_openjpeg_dec_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);

static GstFlowReturn gst_openjpeg_dec_decode_frame_multiple (GstVideoDecoder *
    decoder, GstVideoCodecFrame * frame);
static GstFlowReturn gst_openjpeg_dec_decode_frame_single (GstVideoDecoder *
    decoder, GstVideoCodecFrame * frame);

static void gst_openjpeg_dec_pause_loop (GstOpenJPEGDec * self,
    GstFlowReturn flow_ret);

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GRAY16 "GRAY16_LE"
#define YUV10 "Y444_10LE, I422_10LE, I420_10LE"
#else
#define GRAY16 "GRAY16_BE"
#define YUV10 "Y444_10BE, I422_10BE, I420_10BE"
#endif
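
/* GRAY16 and YUV10 resolve at compile time to the _LE or _BE format names,
 * so the source pad template below only advertises the 16-bit and 10-bit
 * variants matching the host byte order. */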

static GstStaticPadTemplate gst_openjpeg_dec_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/x-j2c, "
        GST_JPEG2000_SAMPLING_LIST "; "
        "image/x-jpc,"
        GST_JPEG2000_SAMPLING_LIST "; image/jp2 ; "
        "image/x-jpc-striped, "
        "num-stripes = (int) [2, MAX], " GST_JPEG2000_SAMPLING_LIST)
    );

static GstStaticPadTemplate gst_openjpeg_dec_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ ARGB64, ARGB, xRGB, "
            "AYUV64, " YUV10 ", "
            "AYUV, Y444, Y42B, I420, Y41B, YUV9, " "GRAY8, " GRAY16 " }"))
    );

#define parent_class gst_openjpeg_dec_parent_class
G_DEFINE_TYPE (GstOpenJPEGDec, gst_openjpeg_dec, GST_TYPE_VIDEO_DECODER);
GST_ELEMENT_REGISTER_DEFINE (openjpegdec, "openjpegdec",
    GST_RANK_PRIMARY, GST_TYPE_OPENJPEG_DEC);

static void
gst_openjpeg_dec_class_init (GstOpenJPEGDecClass * klass)
{
  GstElementClass *element_class;
  GstVideoDecoderClass *video_decoder_class;
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);

  element_class = (GstElementClass *) klass;
  video_decoder_class = (GstVideoDecoderClass *) klass;

  gst_element_class_add_static_pad_template (element_class,
      &gst_openjpeg_dec_src_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_openjpeg_dec_sink_template);

  gst_element_class_set_static_metadata (element_class,
      "OpenJPEG JPEG2000 decoder",
      "Codec/Decoder/Video",
      "Decode JPEG2000 streams",
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");

  element_class->change_state =
      GST_DEBUG_FUNCPTR (gst_openjpeg_dec_change_state);

  video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_start);
  video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_stop);
  video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_flush);
  video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_openjpeg_dec_finish);
  video_decoder_class->set_format =
      GST_DEBUG_FUNCPTR (gst_openjpeg_dec_set_format);
  video_decoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_openjpeg_dec_handle_frame);
  video_decoder_class->decide_allocation = gst_openjpeg_dec_decide_allocation;
  gobject_class->set_property = gst_openjpeg_dec_set_property;
  gobject_class->get_property = gst_openjpeg_dec_get_property;
  gobject_class->finalize = gst_openjpeg_dec_finalize;

  /**
   * GstOpenJPEGDec:max-slice-threads:
   *
   * Maximum number of worker threads to spawn. (0 = no thread)
   *
   * Since: 1.20
   */
  g_object_class_install_property (G_OBJECT_CLASS (klass),
      PROP_MAX_SLICE_THREADS, g_param_spec_int ("max-slice-threads",
          "Maximum slice decoding threads",
          "Maximum number of worker threads to spawn according to the frame boundary. (0 = no thread)",
          0, G_MAXINT, GST_OPENJPEG_DEC_DEFAULT_MAX_THREADS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstOpenJPEGDec:max-threads:
   *
   * Maximum number of worker threads to spawn used by openjpeg internally. (0 = no thread)
   *
   * Since: 1.18
   */
  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MAX_THREADS,
      g_param_spec_int ("max-threads", "Maximum openjpeg threads",
          "Maximum number of worker threads to spawn used by openjpeg internally. (0 = no thread)",
          0, G_MAXINT, GST_OPENJPEG_DEC_DEFAULT_MAX_THREADS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  GST_DEBUG_CATEGORY_INIT (gst_openjpeg_dec_debug, "openjpegdec", 0,
      "OpenJPEG Decoder");
}

static void
gst_openjpeg_dec_init (GstOpenJPEGDec * self)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) self;

  gst_video_decoder_set_packetized (decoder, TRUE);
  gst_video_decoder_set_needs_format (decoder, TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (self), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (self));
  opj_set_default_decoder_parameters (&self->params);
  self->sampling = GST_JPEG2000_SAMPLING_NONE;
  self->max_slice_threads = GST_OPENJPEG_DEC_DEFAULT_MAX_THREADS;
  self->available_threads = GST_OPENJPEG_DEC_DEFAULT_MAX_THREADS;
  self->num_procs = g_get_num_processors ();
  g_mutex_init (&self->messages_lock);
  g_mutex_init (&self->decoding_lock);
  g_cond_init (&self->messages_cond);
  g_queue_init (&self->messages);
}

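/* start() latches max-slice-threads into available_threads: a non-zero
 * value selects the asynchronous stripe-threaded decode path, zero selects
 * the synchronous single-threaded path. */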
static gboolean
gst_openjpeg_dec_start (GstVideoDecoder * decoder)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Starting");
  self->available_threads = self->max_slice_threads;
  if (self->available_threads)
    self->decode_frame = gst_openjpeg_dec_decode_frame_multiple;
  else
    self->decode_frame = gst_openjpeg_dec_decode_frame_single;

  return TRUE;
}

static gboolean
gst_openjpeg_dec_stop (GstVideoDecoder * video_decoder)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (video_decoder);

  GST_DEBUG_OBJECT (self, "Stopping");
  g_mutex_lock (&self->messages_lock);
  gst_pad_stop_task (GST_VIDEO_DECODER_SRC_PAD (video_decoder));

  if (self->output_state) {
    gst_video_codec_state_unref (self->output_state);
    self->output_state = NULL;
  }

  if (self->input_state) {
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }
  g_mutex_unlock (&self->messages_lock);
  GST_DEBUG_OBJECT (self, "Stopped");

  return TRUE;
}

static void
gst_openjpeg_dec_finalize (GObject * object)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (object);

  g_mutex_clear (&self->messages_lock);
  g_mutex_clear (&self->decoding_lock);
  g_cond_clear (&self->messages_cond);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

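/* Going PAUSED->READY raises the flushing flag and wakes up any thread
 * blocked on a drain before chaining up, so the streaming task can exit. */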
static GstStateChangeReturn
gst_openjpeg_dec_change_state (GstElement * element, GstStateChange transition)
{
  GstOpenJPEGDec *self;
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  g_return_val_if_fail (GST_IS_OPENJPEG_DEC (element),
      GST_STATE_CHANGE_FAILURE);
  self = GST_OPENJPEG_DEC (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      self->draining = FALSE;
      self->started = FALSE;
      self->flushing = FALSE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      self->flushing = TRUE;
      g_mutex_lock (&self->drain_lock);
      self->draining = FALSE;
      g_cond_broadcast (&self->drain_cond);
      g_mutex_unlock (&self->drain_lock);
      break;
    default:
      break;
  }

  ret =
      GST_ELEMENT_CLASS (gst_openjpeg_dec_parent_class)->change_state
      (element, transition);

  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      self->started = FALSE;
      self->downstream_flow_ret = GST_FLOW_FLUSHING;
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}

static void
gst_openjpeg_dec_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstOpenJPEGDec *dec = (GstOpenJPEGDec *) object;

  switch (prop_id) {
    case PROP_MAX_SLICE_THREADS:
      g_atomic_int_set (&dec->max_slice_threads, g_value_get_int (value));
      break;
    case PROP_MAX_THREADS:
      g_atomic_int_set (&dec->max_threads, g_value_get_int (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_openjpeg_dec_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstOpenJPEGDec *dec = (GstOpenJPEGDec *) object;

  switch (prop_id) {
    case PROP_MAX_SLICE_THREADS:
      g_value_set_int (value, g_atomic_int_get (&dec->max_slice_threads));
      break;
    case PROP_MAX_THREADS:
      g_value_set_int (value, g_atomic_int_get (&dec->max_threads));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

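/* set_format() derives the OpenJPEG codec format (JP2 vs. J2K), the stripe
 * count and the expected colorspace from the sink caps; the concrete output
 * format is only negotiated once the first image has been decoded. */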
static gboolean
gst_openjpeg_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
  GstStructure *s;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  s = gst_caps_get_structure (state->caps, 0);

  self->color_space = OPJ_CLRSPC_UNKNOWN;

  if (gst_structure_has_name (s, "image/jp2")) {
    self->codec_format = OPJ_CODEC_JP2;
    self->is_jp2c = FALSE;
  } else if (gst_structure_has_name (s, "image/x-j2c")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = TRUE;
  } else if (gst_structure_has_name (s, "image/x-jpc") ||
      gst_structure_has_name (s, "image/x-jpc-striped")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = FALSE;
  } else {
    g_return_val_if_reached (FALSE);
  }

  if (gst_structure_has_name (s, "image/x-jpc-striped")) {
    gst_structure_get_int (s, "num-stripes", &self->num_stripes);
    gst_video_decoder_set_subframe_mode (decoder, TRUE);
  } else {
    self->num_stripes = 1;
    gst_video_decoder_set_subframe_mode (decoder, FALSE);
  }

  self->sampling =
      gst_jpeg2000_sampling_from_string (gst_structure_get_string (s,
          "sampling"));
  if (gst_jpeg2000_sampling_is_rgb (self->sampling))
    self->color_space = OPJ_CLRSPC_SRGB;
  else if (gst_jpeg2000_sampling_is_mono (self->sampling))
    self->color_space = OPJ_CLRSPC_GRAY;
  else if (gst_jpeg2000_sampling_is_yuv (self->sampling))
    self->color_space = OPJ_CLRSPC_SYCC;

  self->ncomps = 0;
  gst_structure_get_int (s, "num-components", &self->ncomps);

  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}

static gboolean
reverse_rgb_channels (GstJPEG2000Sampling sampling)
{
  return sampling == GST_JPEG2000_SAMPLING_BGR
      || sampling == GST_JPEG2000_SAMPLING_BGRA;
}

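/* The fill_frame_* helpers convert OpenJPEG's per-component arrays of
 * 32-bit samples into the negotiated packed or planar video frame layout.
 * Only the rows covered by the decoded stripe ([y0, y1)) are written, so
 * striped streams fill the full-size frame incrementally. The "off" values
 * re-centre signed samples and the "shift" values scale lower-precision
 * samples up to the output bit depth. */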
static void
fill_frame_packed8_4 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint8 *data_out, *tmp;
  const gint *data_in[4];
  gint dstride;
  gint off[4];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 4; c++) {
    data_in[c] = image->comps[c].data;
    off[c] = 0x80 * image->comps[c].sgnd;
  }

  /* copy only the stripe content (image) to the full size frame */
  y0 = image->y0;
  y1 = image->y1;
  GST_DEBUG_OBJECT (self, "y0=%d y1=%d", y0, y1);
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      /* alpha, from 4'th input channel */
      tmp[0] = off[3] + *data_in[3];
      /* colour channels */
      tmp[1] = off[0] + *data_in[0];
      tmp[2] = off[1] + *data_in[1];
      tmp[3] = off[2] + *data_in[2];

      tmp += 4;
      data_in[0]++;
      data_in[1]++;
      data_in[2]++;
      data_in[3]++;
    }
    data_out += dstride;
  }
}

static void
fill_frame_packed16_4 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint16 *data_out, *tmp;
  const gint *data_in[4];
  gint dstride;
  gint shift[4], off[4];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  for (c = 0; c < 4; c++) {
    data_in[c] = image->comps[c].data;
    off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift[c] =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);
  }

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      /* alpha, from 4'th input channel */
      tmp[0] = off[3] + (*data_in[3] << shift[3]);
      /* colour channels */
      tmp[1] = off[0] + (*data_in[0] << shift[0]);
      tmp[2] = off[1] + (*data_in[1] << shift[1]);
      tmp[3] = off[2] + (*data_in[2] << shift[2]);

      tmp += 4;
      data_in[0]++;
      data_in[1]++;
      data_in[2]++;
      data_in[3]++;
    }
    data_out += dstride;
  }
}

static void
fill_frame_packed8_3 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint8 *data_out, *tmp;
  const gint *data_in[3];
  gint dstride;
  gint off[3];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 3; c++) {
    data_in[c] = image->comps[c].data;
    off[c] = 0x80 * image->comps[c].sgnd;
  }
  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      tmp[0] = off[0] + *data_in[0];
      tmp[1] = off[1] + *data_in[1];
      tmp[2] = off[2] + *data_in[2];
      data_in[0]++;
      data_in[1]++;
      data_in[2]++;
      tmp += 3;
    }
    data_out += dstride;
  }
}

static void
fill_frame_packed16_3 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint16 *data_out, *tmp;
  const gint *data_in[3];
  gint dstride;
  gint shift[3], off[3];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  for (c = 0; c < 3; c++) {
    data_in[c] = image->comps[c].data;
    off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift[c] =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);
  }

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      tmp[1] = off[0] + (*data_in[0] << shift[0]);
      tmp[2] = off[1] + (*data_in[1] << shift[1]);
      tmp[3] = off[2] + (*data_in[2] << shift[2]);

      tmp += 4;
      data_in[0]++;
      data_in[1]++;
      data_in[2]++;
    }
    data_out += dstride;
  }
}

/* for grayscale with alpha */
static void
fill_frame_packed8_2 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint8 *data_out, *tmp;
  const gint *data_in[2];
  gint dstride;
  gint off[2];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 2; c++) {
    data_in[c] = image->comps[c].data;
    off[c] = 0x80 * image->comps[c].sgnd;
  }

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      /* alpha, from 2nd input channel */
      tmp[0] = off[1] + *data_in[1];
      /* luminance, from first input channel */
      tmp[1] = off[0] + *data_in[0];
      tmp[2] = tmp[1];
      tmp[3] = tmp[1];
      data_in[0]++;
      data_in[1]++;
      tmp += 4;
    }
    data_out += dstride;
  }
}

/* for grayscale with alpha */
static void
fill_frame_packed16_2 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint16 *data_out, *tmp;
  const gint *data_in[2];
  gint dstride;
  gint shift[2], off[2];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  for (c = 0; c < 2; c++) {
    data_in[c] = image->comps[c].data;
    off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift[c] =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);
  }

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      /* alpha, from 2nd input channel */
      tmp[0] = off[1] + (*data_in[1] << shift[1]);
      /* luminance, from first input channel */
      tmp[1] = off[0] + (*data_in[0] << shift[0]);
      tmp[2] = tmp[1];
      tmp[3] = tmp[1];
      tmp += 4;
      data_in[0]++;
      data_in[1]++;
    }
    data_out += dstride;
  }
}


static void
fill_frame_planar8_1 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w;
  guint8 *data_out, *tmp;
  const gint *data_in;
  gint dstride;
  gint off;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  data_in = image->comps[0].data;
  off = 0x80 * image->comps[0].sgnd;

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++)
      *tmp++ = off + *data_in++;
    data_out += dstride;
  }
}

static void
fill_frame_planar16_1 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w;
  guint16 *data_out, *tmp;
  const gint *data_in;
  gint dstride;
  gint shift, off;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  data_in = image->comps[0].data;

  off = (1 << (image->comps[0].prec - 1)) * image->comps[0].sgnd;
  shift =
      MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, 0) - image->comps[0].prec,
          8), 0);

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++)
      *tmp++ = off + (*data_in++ << shift);
    data_out += dstride;
  }
}

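/* Planar variants: each component is copied into its own plane, using the
 * component's own y0/h so that sub-sampled chroma stripes land at the
 * correct offset within the full-size frame. */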
static void
fill_frame_planar8_3 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint c, x, y, y0, y1, w;
  guint8 *data_out, *tmp;
  const gint *data_in;
  gint dstride, off;

  for (c = 0; c < 3; c++) {
    opj_image_comp_t *comp = image->comps + c;

    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c);
    data_out = GST_VIDEO_FRAME_COMP_DATA (frame, c);
    data_in = comp->data;
    off = 0x80 * comp->sgnd;

    /* copy only the stripe content (image) to the full size frame */
    y0 = comp->y0;
    y1 = comp->y0 + comp->h;
    data_out += y0 * dstride;
    for (y = y0; y < y1; y++) {
      tmp = data_out;
      for (x = 0; x < w; x++)
        *tmp++ = off + *data_in++;
      data_out += dstride;
    }
  }
}

static void
fill_frame_planar16_3 (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint c, x, y, y0, y1, w;
  guint16 *data_out, *tmp;
  const gint *data_in;
  gint dstride;
  gint shift, off;

  for (c = 0; c < 3; c++) {
    opj_image_comp_t *comp = image->comps + c;

    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    dstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, c) / 2;
    data_out = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, c);
    data_in = comp->data;
    off = (1 << (comp->prec - 1)) * comp->sgnd;
    shift =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - comp->prec, 8), 0);

    /* copy only the stripe content (image) to the full size frame */
    y0 = comp->y0;
    y1 = comp->y0 + comp->h;
    data_out += y0 * dstride;
    for (y = y0; y < y1; y++) {
      tmp = data_out;
      for (x = 0; x < w; x++)
        *tmp++ = off + (*data_in++ << shift);
      data_out += dstride;
    }
  }
}

static void
fill_frame_planar8_3_generic (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint8 *data_out, *tmp;
  const gint *data_in[3];
  gint dstride;
  gint dx[3], dy[3], off[3];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  for (c = 0; c < 3; c++) {
    data_in[c] = image->comps[c].data;
    dx[c] = image->comps[c].dx;
    dy[c] = image->comps[c].dy;
    off[c] = 0x80 * image->comps[c].sgnd;
  }

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      tmp[0] = 0xff;
      tmp[1] = off[0] + data_in[0][((y / dy[0]) * w + x) / dx[0]];
      tmp[2] = off[1] + data_in[1][((y / dy[1]) * w + x) / dx[1]];
      tmp[3] = off[2] + data_in[2][((y / dy[2]) * w + x) / dx[2]];
      tmp += 4;
    }
    data_out += dstride;
  }
}

static void
fill_frame_planar16_3_generic (GstOpenJPEGDec * self, GstVideoFrame * frame,
    opj_image_t * image)
{
  gint x, y, y0, y1, w, c;
  guint16 *data_out, *tmp;
  const gint *data_in[3];
  gint dstride;
  gint dx[3], dy[3], shift[3], off[3];

  w = GST_VIDEO_FRAME_WIDTH (frame);
  data_out = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  for (c = 0; c < 3; c++) {
    dx[c] = image->comps[c].dx;
    dy[c] = image->comps[c].dy;
    data_in[c] = image->comps[c].data;
    off[c] = (1 << (image->comps[c].prec - 1)) * image->comps[c].sgnd;
    shift[c] =
        MAX (MIN (GST_VIDEO_FRAME_COMP_DEPTH (frame, c) - image->comps[c].prec,
            8), 0);
  }

  y0 = image->y0;
  y1 = image->y1;
  data_out += y0 * dstride;
  for (y = y0; y < y1; y++) {
    tmp = data_out;
    for (x = 0; x < w; x++) {
      /* fully opaque alpha for the 16-bit output format */
      tmp[0] = 0xffff;
      tmp[1] = off[0] + (data_in[0][((y / dy[0]) * w + x) / dx[0]] << shift[0]);
      tmp[2] = off[1] + (data_in[1][((y / dy[1]) * w + x) / dx[1]] << shift[1]);
      tmp[3] = off[2] + (data_in[2][((y / dy[2]) * w + x) / dx[2]] << shift[2]);
      tmp += 4;
    }
    data_out += dstride;
  }
}

static gint
get_highest_prec (opj_image_t * image)
{
  gint i;
  gint ret = 0;

  for (i = 0; i < image->numcomps; i++)
    ret = MAX (image->comps[i].prec, ret);

  return ret;
}


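/* negotiate() inspects the first decoded opj_image_t (colorspace, component
 * count, sub-sampling and precision), chooses a matching GstVideoFormat and
 * fill_frame implementation, and (re)configures the output state whenever
 * any of those change. */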
static GstFlowReturn
gst_openjpeg_dec_negotiate (GstOpenJPEGDec * self, opj_image_t * image)
{
  GstVideoFormat format;

  if (image->color_space == OPJ_CLRSPC_UNKNOWN || image->color_space == 0)
    image->color_space = self->color_space;

  if (!self->input_state)
    return GST_FLOW_FLUSHING;

  switch (image->color_space) {
    case OPJ_CLRSPC_SRGB:
      if (image->numcomps == 4) {
        if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||
            image->comps[1].dx != 1 || image->comps[1].dy != 1 ||
            image->comps[2].dx != 1 || image->comps[2].dy != 1 ||
            image->comps[3].dx != 1 || image->comps[3].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling for RGBA not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_packed8_4;
          format =
              reverse_rgb_channels (self->sampling) ? GST_VIDEO_FORMAT_ABGR :
              GST_VIDEO_FORMAT_ARGB;

        } else if (get_highest_prec (image) <= 16) {
          self->fill_frame = fill_frame_packed16_4;
          format = GST_VIDEO_FORMAT_ARGB64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d", image->comps[3].prec);
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else if (image->numcomps == 3) {
        if (image->comps[0].dx != 1 || image->comps[0].dy != 1 ||
            image->comps[1].dx != 1 || image->comps[1].dy != 1 ||
            image->comps[2].dx != 1 || image->comps[2].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling for RGB not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_packed8_3;
          format =
              reverse_rgb_channels (self->sampling) ? GST_VIDEO_FORMAT_BGR :
              GST_VIDEO_FORMAT_RGB;
        } else if (get_highest_prec (image) <= 16) {
          self->fill_frame = fill_frame_packed16_3;
          format = GST_VIDEO_FORMAT_ARGB64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else {
        GST_ERROR_OBJECT (self, "Unsupported number of RGB components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }
      break;
    case OPJ_CLRSPC_GRAY:
      if (image->numcomps == 1) {
        if (image->comps[0].dx != 1 && image->comps[0].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling for GRAY not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_planar8_1;
          format = GST_VIDEO_FORMAT_GRAY8;
        } else if (get_highest_prec (image) <= 16) {
          self->fill_frame = fill_frame_planar16_1;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
          format = GST_VIDEO_FORMAT_GRAY16_LE;
#else
          format = GST_VIDEO_FORMAT_GRAY16_BE;
#endif
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else if (image->numcomps == 2) {
        if ((image->comps[0].dx != 1 && image->comps[0].dy != 1) ||
            (image->comps[1].dx != 1 && image->comps[1].dy != 1)) {
          GST_ERROR_OBJECT (self, "Sub-sampling for GRAY not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }
        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_packed8_2;
          format = GST_VIDEO_FORMAT_ARGB;
        } else if (get_highest_prec (image) <= 16) {
          self->fill_frame = fill_frame_packed16_2;
          format = GST_VIDEO_FORMAT_ARGB64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else {
        GST_ERROR_OBJECT (self, "Unsupported number of GRAY components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }
      break;
    case OPJ_CLRSPC_SYCC:
      if (image->numcomps != 3 && image->numcomps != 4) {
        GST_ERROR_OBJECT (self, "Unsupported number of YUV components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }

      if (image->comps[0].dx != 1 || image->comps[0].dy != 1) {
        GST_ERROR_OBJECT (self, "Sub-sampling of luma plane not supported");
        return GST_FLOW_NOT_NEGOTIATED;
      }

      if (image->comps[1].dx != image->comps[2].dx ||
          image->comps[1].dy != image->comps[2].dy) {
        GST_ERROR_OBJECT (self,
            "Different sub-sampling of chroma planes not supported");
        return GST_FLOW_ERROR;
      }

      if (image->numcomps == 4) {
        if (image->comps[3].dx != 1 || image->comps[3].dy != 1) {
          GST_ERROR_OBJECT (self, "Sub-sampling of alpha plane not supported");
          return GST_FLOW_NOT_NEGOTIATED;
        }

        if (get_highest_prec (image) == 8) {
          self->fill_frame = fill_frame_packed8_4;
          format = GST_VIDEO_FORMAT_AYUV;
        } else if (image->comps[3].prec <= 16) {
          self->fill_frame = fill_frame_packed16_4;
          format = GST_VIDEO_FORMAT_AYUV64;
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d", image->comps[0].prec);
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else if (image->numcomps == 3) {
        if (get_highest_prec (image) == 8) {
          if (image->comps[1].dx == 1 && image->comps[1].dy == 1) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_Y444;
          } else if (image->comps[1].dx == 2 && image->comps[1].dy == 1) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_Y42B;
          } else if (image->comps[1].dx == 2 && image->comps[1].dy == 2) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_I420;
          } else if (image->comps[1].dx == 4 && image->comps[1].dy == 1) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_Y41B;
          } else if (image->comps[1].dx == 4 && image->comps[1].dy == 4) {
            self->fill_frame = fill_frame_planar8_3;
            format = GST_VIDEO_FORMAT_YUV9;
          } else {
            self->fill_frame = fill_frame_planar8_3_generic;
            format = GST_VIDEO_FORMAT_AYUV;
          }
        } else if (get_highest_prec (image) <= 16) {
          if (image->comps[0].prec == 10 &&
              image->comps[1].prec == 10 && image->comps[2].prec == 10) {
            if (image->comps[1].dx == 1 && image->comps[1].dy == 1) {
              self->fill_frame = fill_frame_planar16_3;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
              format = GST_VIDEO_FORMAT_Y444_10LE;
#else
              format = GST_VIDEO_FORMAT_Y444_10BE;
#endif
            } else if (image->comps[1].dx == 2 && image->comps[1].dy == 1) {
              self->fill_frame = fill_frame_planar16_3;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
              format = GST_VIDEO_FORMAT_I422_10LE;
#else
              format = GST_VIDEO_FORMAT_I422_10BE;
#endif
            } else if (image->comps[1].dx == 2 && image->comps[1].dy == 2) {
              self->fill_frame = fill_frame_planar16_3;
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
              format = GST_VIDEO_FORMAT_I420_10LE;
#else
              format = GST_VIDEO_FORMAT_I420_10BE;
#endif
            } else {
              self->fill_frame = fill_frame_planar16_3_generic;
              format = GST_VIDEO_FORMAT_AYUV64;
            }
          } else {
            self->fill_frame = fill_frame_planar16_3_generic;
            format = GST_VIDEO_FORMAT_AYUV64;
          }
        } else {
          GST_ERROR_OBJECT (self, "Unsupported depth %d",
              get_highest_prec (image));
          return GST_FLOW_NOT_NEGOTIATED;
        }
      } else {
        GST_ERROR_OBJECT (self, "Unsupported number of YUV components: %d",
            image->numcomps);
        return GST_FLOW_NOT_NEGOTIATED;
      }
      break;
    default:
      GST_ERROR_OBJECT (self, "Unsupported colorspace %d", image->color_space);
      return GST_FLOW_NOT_NEGOTIATED;
  }

  if (!self->output_state ||
      self->output_state->info.finfo->format != format ||
      self->output_state->info.width != self->input_state->info.width ||
      self->output_state->info.height != self->input_state->info.height) {
    if (self->output_state)
      gst_video_codec_state_unref (self->output_state);
    self->output_state =
        gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self), format,
        self->input_state->info.width, self->input_state->info.height,
        self->input_state);
    if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self)))
      return GST_FLOW_NOT_NEGOTIATED;
  }

  return GST_FLOW_OK;
}

static void
gst_openjpeg_dec_opj_error (const char *msg, void *userdata)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
  gchar *trimmed = g_strchomp (g_strdup (msg));
  GST_TRACE_OBJECT (self, "openjpeg error: %s", trimmed);
  g_free (trimmed);
}

static void
gst_openjpeg_dec_opj_warning (const char *msg, void *userdata)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
  gchar *trimmed = g_strchomp (g_strdup (msg));
  GST_TRACE_OBJECT (self, "openjpeg warning: %s", trimmed);
  g_free (trimmed);
}

static void
gst_openjpeg_dec_opj_info (const char *msg, void *userdata)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (userdata);
  gchar *trimmed = g_strchomp (g_strdup (msg));
  GST_TRACE_OBJECT (self, "openjpeg info: %s", trimmed);
  g_free (trimmed);
}

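/* Minimal in-memory stream for OpenJPEG: instead of a file, the library
 * pulls the codestream through these callbacks, which read directly from
 * the mapped input buffer. */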
typedef struct
{
  guint8 *data;
  guint offset, size;
} MemStream;

static OPJ_SIZE_T
read_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;
  OPJ_SIZE_T read;

  if (mstream->offset == mstream->size)
    return -1;

  if (mstream->offset + p_nb_bytes > mstream->size)
    read = mstream->size - mstream->offset;
  else
    read = p_nb_bytes;

  memcpy (p_buffer, mstream->data + mstream->offset, read);
  mstream->offset += read;

  return read;
}

static OPJ_SIZE_T
write_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
{
  g_return_val_if_reached (-1);
}

static OPJ_OFF_T
skip_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;
  OPJ_OFF_T skip;

  if (mstream->offset + p_nb_bytes > mstream->size)
    skip = mstream->size - mstream->offset;
  else
    skip = p_nb_bytes;

  mstream->offset += skip;

  return skip;
}

static OPJ_BOOL
seek_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;

  if (p_nb_bytes > mstream->size)
    return OPJ_FALSE;

  mstream->offset = p_nb_bytes;

  return OPJ_TRUE;
}

static gboolean
gst_openjpeg_dec_is_last_input_subframe (GstVideoDecoder * dec,
    GstOpenJPEGCodecMessage * message)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (dec);

  return (message->last_subframe || message->stripe == self->num_stripes);
}

static gboolean
gst_openjpeg_dec_is_last_output_subframe (GstVideoDecoder * dec,
    GstOpenJPEGCodecMessage * message)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (dec);

  return (gst_video_decoder_get_processed_subframe_index (dec,
          message->frame) == (self->num_stripes - 1));
}


static gboolean
gst_openjpeg_dec_has_pending_job_to_finish (GstOpenJPEGDec * self)
{
  gboolean res = FALSE;
  if (self->downstream_flow_ret != GST_FLOW_OK)
    return res;
  g_mutex_lock (&self->messages_lock);
  res = (!g_queue_is_empty (&self->messages)
      || (self->available_threads < self->max_slice_threads));
  g_mutex_unlock (&self->messages_lock);
  return res;
}

static GstOpenJPEGCodecMessage *
gst_openjpeg_decode_message_new (GstOpenJPEGDec * self,
    GstVideoCodecFrame * frame, int num_stripe)
{
  GstOpenJPEGCodecMessage *message = g_slice_new0 (GstOpenJPEGCodecMessage);
  GST_DEBUG_OBJECT (self, "message: %p", message);
  message->frame = gst_video_codec_frame_ref (frame);
  message->stripe = num_stripe;
  message->last_error = OPENJPEG_ERROR_NONE;
  message->input_buffer = gst_buffer_ref (frame->input_buffer);
  message->last_subframe = GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
      GST_BUFFER_FLAG_MARKER);
  return message;
}

static GstOpenJPEGCodecMessage *
gst_openjpeg_decode_message_free (GstOpenJPEGDec * self,
    GstOpenJPEGCodecMessage * message)
{
  if (!message)
    return message;
  gst_buffer_unref (message->input_buffer);
  gst_video_codec_frame_unref (message->frame);
  GST_DEBUG_OBJECT (self, "message: %p", message);
  g_slice_free (GstOpenJPEGCodecMessage, message);
  return NULL;
}

static GstOpenJPEGCodecMessage *
gst_openjpeg_dec_wait_for_new_message (GstOpenJPEGDec * self, gboolean dry_run)
{
  GstOpenJPEGCodecMessage *message = NULL;
  g_mutex_lock (&self->messages_lock);
  if (dry_run && self->available_threads == self->max_slice_threads)
    goto done;
  if (!g_queue_is_empty (&self->messages) && !dry_run) {
    message = g_queue_pop_head (&self->messages);
  } else {
    g_cond_wait (&self->messages_cond, &self->messages_lock);
  }

done:
  g_mutex_unlock (&self->messages_lock);
  return message;
}

static void
gst_openjpeg_dec_pause_loop (GstOpenJPEGDec * self, GstFlowReturn flow_ret)
{
  g_mutex_lock (&self->drain_lock);
  GST_DEBUG_OBJECT (self, "Pause the loop draining %d flow_ret %s",
      self->draining, gst_flow_get_name (flow_ret));
  if (self->draining) {
    self->draining = FALSE;
    g_cond_broadcast (&self->drain_cond);
  }
  gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
  self->downstream_flow_ret = flow_ret;
  self->started = FALSE;
  g_mutex_unlock (&self->drain_lock);
}

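/* Streaming task for the threaded path: it pops completed stripe messages
 * off the queue and calls finish_frame()/finish_subframe() from the srcpad
 * task, so pushes downstream stay serialized on a single thread. */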
static void
gst_openjpeg_dec_loop (GstOpenJPEGDec * self)
{
  GstOpenJPEGCodecMessage *message;
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
  GstFlowReturn flow_ret = GST_FLOW_OK;

  message = gst_openjpeg_dec_wait_for_new_message (self, FALSE);
  if (message) {
    GST_DEBUG_OBJECT (self,
        "received message for frame %p stripe %d last_error %d threads %d",
        message->frame, message->stripe, message->last_error,
        self->available_threads);

    if (self->flushing)
      goto flushing;

    if (message->last_error != OPENJPEG_ERROR_NONE)
      goto decode_error;

    g_mutex_lock (&self->decoding_lock);

    if (gst_openjpeg_dec_is_last_output_subframe (decoder, message))
      flow_ret = gst_video_decoder_finish_frame (decoder, message->frame);
    else
      gst_video_decoder_finish_subframe (decoder, message->frame);
    g_mutex_unlock (&self->decoding_lock);
    message = gst_openjpeg_decode_message_free (self, message);
    g_cond_broadcast (&self->messages_cond);
  }

  if (flow_ret != GST_FLOW_OK)
    goto flow_error;

  if (self->draining && !gst_openjpeg_dec_has_pending_job_to_finish (self))
    gst_openjpeg_dec_pause_loop (self, GST_FLOW_OK);

  if (self->flushing)
    goto flushing;

  return;

decode_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, (NULL),
        ("OpenJPEG decode failure %d", message->last_error));
    gst_video_codec_frame_unref (message->frame);
    gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
    gst_openjpeg_dec_pause_loop (self, GST_FLOW_ERROR);
    gst_openjpeg_decode_message_free (self, message);
    return;
  }

flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    if (message) {
      gst_video_codec_frame_unref (message->frame);
      gst_openjpeg_decode_message_free (self, message);
    }
    gst_openjpeg_dec_pause_loop (self, GST_FLOW_FLUSHING);
    return;
  }

flow_error:
  {
    if (flow_ret == GST_FLOW_EOS) {
      GST_DEBUG_OBJECT (self, "EOS");

      gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
    } else if (flow_ret < GST_FLOW_EOS) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          ("Internal data stream error."), ("stream stopped, reason %s",
              gst_flow_get_name (flow_ret)));

      gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
          gst_event_new_eos ());
    } else if (flow_ret == GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
    }
    gst_openjpeg_dec_pause_loop (self, flow_ret);

    return;
  }

}

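/* Record the error code on the message and jump to the common cleanup path;
 * when mutex_unlock is set, the decoding lock is released first. */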
#define DECODE_ERROR(self, message, err_code, mutex_unlock) { \
      GST_WARNING_OBJECT(self, "An error occurred err_code=%d", err_code);\
      message->last_error = err_code; \
      if (mutex_unlock) \
        g_mutex_unlock (&self->decoding_lock);\
      goto done; \
}

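/* Worker body: runs either on a gst_element_call_async() thread (threaded
 * path) or inline (direct path). Each stripe gets its own decompressor and
 * in-memory stream; the decoded image is blitted into the output frame
 * while holding the decoding lock. */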
static void
gst_openjpeg_dec_decode_stripe (GstElement * element, gpointer user_data)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (element);
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (element);
  GstOpenJPEGCodecMessage *message = (GstOpenJPEGCodecMessage *) user_data;
  GstMapInfo map;
  GstVideoFrame vframe;
  opj_codec_t *dec = NULL;
  opj_stream_t *stream = NULL;
  MemStream mstream;
  opj_image_t *image = NULL;
  opj_dparameters_t params;
  gint max_threads;

  GstFlowReturn ret;
  gint i;

  GST_DEBUG_OBJECT (self, "Starting to decode stripe: frame=%p stripe=%d",
      message->frame, message->stripe);

  dec = opj_create_decompress (self->codec_format);
  if (!dec)
    DECODE_ERROR (self, message, OPENJPEG_ERROR_INIT, FALSE);

  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (dec, gst_openjpeg_dec_opj_info, self);
    opj_set_warning_handler (dec, gst_openjpeg_dec_opj_warning, self);
    opj_set_error_handler (dec, gst_openjpeg_dec_opj_error, self);
  } else {
    opj_set_info_handler (dec, NULL, NULL);
    opj_set_warning_handler (dec, NULL, NULL);
    opj_set_error_handler (dec, NULL, NULL);
  }

  params = self->params;
  if (self->ncomps)
    params.jpwl_exp_comps = self->ncomps;
  if (!opj_setup_decoder (dec, &params))
    DECODE_ERROR (self, message, OPENJPEG_ERROR_OPEN, FALSE);

  max_threads = g_atomic_int_get (&self->max_threads);
  if (max_threads > self->num_procs)
    max_threads = self->num_procs;
  if (!opj_codec_set_threads (dec, max_threads))
    GST_WARNING_OBJECT (self, "Failed to set %d threads", max_threads);

  if (!gst_buffer_map (message->input_buffer, &map, GST_MAP_READ))
    DECODE_ERROR (self, message, OPENJPEG_ERROR_MAP_READ, FALSE);


  if (self->is_jp2c && map.size < 8)
    DECODE_ERROR (self, message, OPENJPEG_ERROR_MAP_READ, FALSE);

  stream = opj_stream_create (4096, OPJ_TRUE);
  if (!stream)
    DECODE_ERROR (self, message, OPENJPEG_ERROR_OPEN, FALSE);

  mstream.data = map.data + (self->is_jp2c ? 8 : 0);
  mstream.offset = 0;
  mstream.size = map.size - (self->is_jp2c ? 8 : 0);

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream, NULL);
  opj_stream_set_user_data_length (stream, mstream.size);

  image = NULL;
  if (!opj_read_header (stream, dec, &image))
    DECODE_ERROR (self, message, OPENJPEG_ERROR_DECODE, FALSE);

  if (!opj_decode (dec, stream, image))
    DECODE_ERROR (self, message, OPENJPEG_ERROR_DECODE, FALSE);

  for (i = 0; i < image->numcomps; i++) {
    if (image->comps[i].data == NULL)
      DECODE_ERROR (self, message, OPENJPEG_ERROR_DECODE, FALSE);
  }

  gst_buffer_unmap (message->input_buffer, &map);

  g_mutex_lock (&self->decoding_lock);

  ret = gst_openjpeg_dec_negotiate (self, image);
  if (ret != GST_FLOW_OK)
    DECODE_ERROR (self, message, OPENJPEG_ERROR_NEGOCIATE, TRUE);

  if (message->frame->output_buffer == NULL) {
    ret = gst_video_decoder_allocate_output_frame (decoder, message->frame);
    if (ret != GST_FLOW_OK)
      DECODE_ERROR (self, message, OPENJPEG_ERROR_ALLOCATE, TRUE);
  }

  if (!gst_video_frame_map (&vframe, &self->output_state->info,
          message->frame->output_buffer, GST_MAP_WRITE))
    DECODE_ERROR (self, message, OPENJPEG_ERROR_MAP_WRITE, TRUE);

  if (message->stripe)
    self->fill_frame (self, &vframe, image);
  else {
    GST_ERROR_OBJECT (decoder, "current_stripe should be greater than 0");
    DECODE_ERROR (self, message, OPENJPEG_ERROR_MAP_WRITE, TRUE);
  }
  gst_video_frame_unmap (&vframe);
  g_mutex_unlock (&self->decoding_lock);
  message->last_error = OPENJPEG_ERROR_NONE;
  GST_DEBUG_OBJECT (self, "Finished decoding stripe: frame=%p stripe=%d",
      message->frame, message->stripe);
done:
  if (!message->direct) {
    g_mutex_lock (&self->messages_lock);
    self->available_threads++;
    g_queue_push_tail (&self->messages, message);
    g_mutex_unlock (&self->messages_lock);
    g_cond_broadcast (&self->messages_cond);
  }

  if (stream) {
    opj_end_decompress (dec, stream);
    opj_stream_destroy (stream);
  }
  if (image)
    opj_image_destroy (image);
  if (dec)
    opj_destroy_codec (dec);
}

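/* Threaded decode: wait (with the stream lock released) until a worker slot
 * is free, then hand the stripe off to gst_element_call_async(); the srcpad
 * task in gst_openjpeg_dec_loop() finishes the frame. */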
1521 static GstFlowReturn
gst_openjpeg_dec_decode_frame_multiple(GstVideoDecoder * decoder,GstVideoCodecFrame * frame)1522 gst_openjpeg_dec_decode_frame_multiple (GstVideoDecoder * decoder,
1523     GstVideoCodecFrame * frame)
1524 {
1525   GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
1526   GstOpenJPEGCodecMessage *message = NULL;
1527   guint current_stripe =
1528       gst_video_decoder_get_input_subframe_index (decoder, frame);
1529 
1530   if (!self->started) {
1531     GST_DEBUG_OBJECT (self, "Starting task");
1532     gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
1533         (GstTaskFunction) gst_openjpeg_dec_loop, decoder, NULL);
1534     self->started = TRUE;
1535   }
1536   /* Make sure to release the base class stream lock, otherwise
1537    * _loop() can't call _finish_frame() and we might block forever
1538    * because no input buffers are released */
1539   GST_VIDEO_DECODER_STREAM_UNLOCK (self);
1540 
  while (!self->available_threads)
    gst_openjpeg_dec_wait_for_new_message (self, TRUE);

  GST_VIDEO_DECODER_STREAM_LOCK (self);

  if (self->downstream_flow_ret != GST_FLOW_OK)
    return self->downstream_flow_ret;

  g_mutex_lock (&self->messages_lock);
  message = gst_openjpeg_decode_message_new (self, frame, current_stripe);
  GST_LOG_OBJECT (self,
      "About to enqueue a decoding message from frame %p stripe %d", frame,
      message->stripe);

  if (self->available_threads)
    self->available_threads--;
  g_mutex_unlock (&self->messages_lock);

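  /* Decode the stripe from another thread; the finished message is picked
   * up by the srcpad loop */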
  gst_element_call_async (GST_ELEMENT (self),
      (GstElementCallAsyncFunc) gst_openjpeg_dec_decode_stripe, message, NULL);
  if (gst_video_decoder_get_subframe_mode (decoder)
      && gst_openjpeg_dec_is_last_input_subframe (decoder, message))
    gst_video_decoder_have_last_subframe (decoder, frame);
  return GST_FLOW_OK;
}

static GstFlowReturn
gst_openjpeg_dec_decode_frame_single (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
  GstOpenJPEGCodecMessage *message = NULL;
  guint current_stripe =
      gst_video_decoder_get_input_subframe_index (decoder, frame);
  GstFlowReturn ret = GST_FLOW_OK;

  message = gst_openjpeg_decode_message_new (self, frame, current_stripe);
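  /* Direct messages are decoded synchronously in the streaming thread and
   * are not handed back through the message queue */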
  message->direct = TRUE;
  gst_openjpeg_dec_decode_stripe (GST_ELEMENT (decoder), message);
  if (message->last_error != OPENJPEG_ERROR_NONE) {
    GST_WARNING_OBJECT
        (self, "Error %d occurred during the JPEG 2000 decoding",
        message->last_error);
    self->last_error = message->last_error;
    ret = GST_FLOW_ERROR;
    goto done;
  }
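  /* Only the last subframe pushes the output buffer downstream; earlier
   * stripes are finished without pushing anything yet */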
  if (gst_openjpeg_dec_is_last_output_subframe (decoder, message))
    ret = gst_video_decoder_finish_frame (decoder, message->frame);
  else
    gst_video_decoder_finish_subframe (decoder, message->frame);

done:
  gst_openjpeg_decode_message_free (self, message);
  return ret;
}

static gboolean
gst_openjpeg_dec_flush (GstVideoDecoder * decoder)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Flushing decoder");

  /* Wait until the srcpad loop is stopped,
   * unlock GST_VIDEO_DECODER_STREAM_LOCK to prevent deadlocks
   * caused by using this lock from inside the loop function */
  GST_VIDEO_DECODER_STREAM_UNLOCK (self);
  gst_pad_stop_task (GST_VIDEO_DECODER_SRC_PAD (decoder));
  GST_DEBUG_OBJECT (self, "Flushing -- task stopped");
  GST_VIDEO_DECODER_STREAM_LOCK (self);

  /* Reset our state */
  self->started = FALSE;
  GST_DEBUG_OBJECT (self, "Flush finished");

  return TRUE;
}

static GstFlowReturn
gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 deadline;
  guint current_stripe =
      gst_video_decoder_get_input_subframe_index (decoder, frame);

  if (self->downstream_flow_ret != GST_FLOW_OK) {
    gst_video_codec_frame_unref (frame);
    return self->downstream_flow_ret;
  }

  GST_DEBUG_OBJECT (self, "Handling frame with current stripe %d",
      current_stripe);

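  /* Use the QoS deadline to decide whether decoding this frame is still
   * useful; a negative deadline means the frame is already late */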
  deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
  if (self->drop_subframes || deadline < 0) {
    GST_INFO_OBJECT (self,
        "Dropping too late frame: deadline %" G_GINT64_FORMAT, deadline);
    self->drop_subframes = TRUE;
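    /* Keep dropping the remaining stripes of this frame; once the last
     * stripe (or a buffer with the marker flag) arrives, drop the whole
     * frame and reset */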
    if (current_stripe == self->num_stripes ||
        GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_BUFFER_FLAG_MARKER)) {
      ret = gst_video_decoder_drop_frame (decoder, frame);
      self->drop_subframes = FALSE;
    } else {
      gst_video_decoder_drop_subframe (decoder, frame);
    }

    goto done;
  }

  ret = self->decode_frame (decoder, frame);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (self, "Unable to decode the frame with flow error: %s",
        gst_flow_get_name (ret));
    goto error;
  }

done:
  return ret;

error:
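  /* Map the internal error code to an element error message on the bus */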
  switch (self->last_error) {
    case OPENJPEG_ERROR_INIT:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to initialize OpenJPEG decoder"), (NULL));
      break;
    case OPENJPEG_ERROR_MAP_READ:
      GST_ELEMENT_ERROR (self, CORE, FAILED,
          ("Failed to map input buffer"), (NULL));
      break;
    case OPENJPEG_ERROR_MAP_WRITE:
      GST_ELEMENT_ERROR (self, CORE, FAILED,
          ("Failed to map output buffer"), (NULL));
      break;
    case OPENJPEG_ERROR_FILL_IMAGE:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to fill OpenJPEG image"), (NULL));
      break;
    case OPENJPEG_ERROR_OPEN:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to open OpenJPEG data"), (NULL));
      break;
    case OPENJPEG_ERROR_DECODE:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to decode OpenJPEG data"), (NULL));
      break;
    case OPENJPEG_ERROR_NEGOCIATE:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to negotiate OpenJPEG data"), (NULL));
      break;
    case OPENJPEG_ERROR_ALLOCATE:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to allocate OpenJPEG data"), (NULL));
      break;
    default:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to decode OpenJPEG data"), (NULL));
      break;
  }

  return GST_FLOW_ERROR;
}

static GstFlowReturn
gst_openjpeg_dec_finish (GstVideoDecoder * decoder)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);

  GST_DEBUG_OBJECT (self, "Draining component");

  if (!self->started) {
    GST_DEBUG_OBJECT (self, "Component not started yet");
    return GST_FLOW_OK;
  }

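  /* Signal the srcpad loop that we are draining; if no decode jobs are
   * pending there is nothing to wait for */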
  self->draining = TRUE;
  if (!gst_openjpeg_dec_has_pending_job_to_finish (self)) {
    GST_DEBUG_OBJECT (self, "Component ready");
    g_cond_broadcast (&self->messages_cond);
    return GST_FLOW_OK;
  }

  /* Make sure to release the base class stream lock, otherwise
   * _loop() can't call _finish_frame() and we might block forever
   * because no input buffers are released */
  GST_VIDEO_DECODER_STREAM_UNLOCK (self);

  g_mutex_lock (&self->drain_lock);
  GST_DEBUG_OBJECT (self, "Waiting until component is drained");

  while (self->draining)
    g_cond_wait (&self->drain_cond, &self->drain_lock);

  GST_DEBUG_OBJECT (self, "Drained component");

  g_mutex_unlock (&self->drain_lock);
  GST_VIDEO_DECODER_STREAM_LOCK (self);
  self->started = FALSE;
  return GST_FLOW_OK;
}

static gboolean
gst_openjpeg_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstBufferPool *pool;
  GstStructure *config;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  g_assert (gst_query_get_n_allocation_pools (query) > 0);
  gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
  g_assert (pool != NULL);

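  /* Enable GstVideoMeta on the negotiated pool when downstream supports it,
   * so buffers can carry non-default strides and offsets */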
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }
  gst_buffer_pool_set_config (pool, config);
  gst_object_unref (pool);

  return TRUE;
}