• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2012 Collabora Ltd.
3  *     Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
4  * Copyright (C) 2013 Sebastian Dröge <slomo@circular-chaos.org>
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Library General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Library General Public License for more details.
15  *
16  * You should have received a copy of the GNU Library General Public
17  * License along with this library; if not, write to the
18  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
19  * Boston, MA 02110-1301, USA.
20  *
21  */
22 
23 /**
24  * SECTION:element-openjpegenc
25  * @title: openjpegenc
26  * @see_also: openjpegdec
27  *
28  * openjpegenc encodes a raw video stream.
29  *
30  * ## Example launch lines
31  * |[
32  * gst-launch-1.0 -v videotestsrc num-buffers=10 ! openjpegenc ! jpeg2000parse ! openjpegdec ! videoconvert ! autovideosink sync=false
33  * ]| Encode and decode whole frames.
34  * |[
35  * gst-launch-1.0 -v videotestsrc num-buffers=10 ! openjpegenc num-threads=8 num-stripes=8 ! jpeg2000parse ! openjpegdec max-threads=8 ! videoconvert ! autovideosink sync=false
36  * ]| Encode and decode frame split with stripes.
37  *
38  */
39 
40 #ifdef HAVE_CONFIG_H
41 #include "config.h"
42 #endif
43 
44 #include "gstopenjpegenc.h"
45 #include <gst/codecparsers/gstjpeg2000sampling.h>
46 
47 #include <string.h>
48 #include <math.h>
49 
50 GST_DEBUG_CATEGORY_STATIC (gst_openjpeg_enc_debug);
51 #define GST_CAT_DEFAULT gst_openjpeg_enc_debug
52 
53 #define GST_OPENJPEG_ENC_TYPE_PROGRESSION_ORDER (gst_openjpeg_enc_progression_order_get_type())
54 static GType
gst_openjpeg_enc_progression_order_get_type(void)55 gst_openjpeg_enc_progression_order_get_type (void)
56 {
57   static const GEnumValue values[] = {
58     {OPJ_LRCP, "LRCP", "lrcp"},
59     {OPJ_RLCP, "RLCP", "rlcp"},
60     {OPJ_RPCL, "RPCL", "rpcl"},
61     {OPJ_PCRL, "PCRL", "pcrl"},
62     {OPJ_CPRL, "CPRL", "crpl"},
63     {0, NULL, NULL}
64   };
65   static GType id = 0;
66 
67   if (g_once_init_enter ((gsize *) & id)) {
68     GType _id;
69 
70     _id = g_enum_register_static ("GstOpenJPEGEncProgressionOrder", values);
71 
72     g_once_init_leave ((gsize *) & id, _id);
73   }
74 
75   return id;
76 }
77 
/* GObject property ids for the encoder. */
enum
{
  PROP_0,                       /* id 0 is reserved by GObject */
  PROP_NUM_LAYERS,
  PROP_NUM_RESOLUTIONS,
  PROP_PROGRESSION_ORDER,
  PROP_TILE_OFFSET_X,
  PROP_TILE_OFFSET_Y,
  PROP_TILE_WIDTH,
  PROP_TILE_HEIGHT,
  PROP_NUM_STRIPES,
  PROP_NUM_THREADS,
  PROP_LAST                     /* sentinel, not a real property */
};
92 
93 
94 #define DEFAULT_NUM_LAYERS 1
95 #define DEFAULT_NUM_RESOLUTIONS 6
96 #define DEFAULT_PROGRESSION_ORDER OPJ_LRCP
97 #define DEFAULT_TILE_OFFSET_X 0
98 #define DEFAULT_TILE_OFFSET_Y 0
99 #define DEFAULT_TILE_WIDTH 0
100 #define DEFAULT_TILE_HEIGHT 0
101 #define GST_OPENJPEG_ENC_DEFAULT_NUM_STRIPES  1
102 #define GST_OPENJPEG_ENC_DEFAULT_NUM_THREADS 0
103 
104 /* prototypes */
105 static void gst_openjpeg_enc_finalize (GObject * object);
106 
107 static GstStateChangeReturn
108 gst_openjpeg_enc_change_state (GstElement * element, GstStateChange transition);
109 
110 static void gst_openjpeg_enc_set_property (GObject * object, guint prop_id,
111     const GValue * value, GParamSpec * pspec);
112 static void gst_openjpeg_enc_get_property (GObject * object, guint prop_id,
113     GValue * value, GParamSpec * pspec);
114 
115 static gboolean gst_openjpeg_enc_start (GstVideoEncoder * encoder);
116 static gboolean gst_openjpeg_enc_stop (GstVideoEncoder * encoder);
117 static gboolean gst_openjpeg_enc_set_format (GstVideoEncoder * encoder,
118     GstVideoCodecState * state);
119 static GstFlowReturn gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
120     GstVideoCodecFrame * frame);
121 static gboolean gst_openjpeg_enc_propose_allocation (GstVideoEncoder * encoder,
122     GstQuery * query);
123 static GstFlowReturn gst_openjpeg_enc_encode_frame_multiple (GstVideoEncoder *
124     encoder, GstVideoCodecFrame * frame);
125 static GstFlowReturn gst_openjpeg_enc_encode_frame_single (GstVideoEncoder *
126     encoder, GstVideoCodecFrame * frame);
127 static GstOpenJPEGCodecMessage
128     * gst_openjpeg_encode_message_free (GstOpenJPEGCodecMessage * message);
129 
130 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
131 #define GRAY16 "GRAY16_LE"
132 #define YUV10 "Y444_10LE, I422_10LE, I420_10LE"
133 #else
134 #define GRAY16 "GRAY16_BE"
135 #define YUV10 "Y444_10BE, I422_10BE, I420_10BE"
136 #endif
137 
/* Raw video formats accepted on the sink pad: packed 8/16-bit RGB(A) and
 * YUV(A), planar 8-bit and 10-bit YUV, plus 8/16-bit grayscale.  GRAY16
 * and the 10-bit YUV list are endianness-matched via the macros above. */
static GstStaticPadTemplate gst_openjpeg_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ ARGB64, ARGB, xRGB, "
            "AYUV64, " YUV10 ", "
            "AYUV, Y444, Y42B, I420, Y41B, YUV9, " "GRAY8, " GRAY16 " }"))
    );
146 
/* Output formats on the src pad: J2C (codestream in jp2c box), raw JPEG
 * 2000 codestream (x-jpc, optionally stripe-aligned), JP2 file format,
 * and the striped codestream variant used for low-latency encoding. */
static GstStaticPadTemplate gst_openjpeg_enc_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/x-j2c, "
        "width = (int) [1, MAX], "
        "height = (int) [1, MAX], "
        "num-components = (int) [1, 4], "
        GST_JPEG2000_SAMPLING_LIST ","
        GST_JPEG2000_COLORSPACE_LIST "; "
        "image/x-jpc, "
        "width = (int) [1, MAX], "
        "height = (int) [1, MAX], "
        "num-components = (int) [1, 4], "
        "num-stripes = (int) [1, MAX], "
        "alignment = (string) { frame, stripe }, "
        GST_JPEG2000_SAMPLING_LIST ","
        GST_JPEG2000_COLORSPACE_LIST "; "
        "image/jp2, " "width = (int) [1, MAX], "
        "height = (int) [1, MAX] ;"
        "image/x-jpc-striped, "
        "width = (int) [1, MAX], "
        "height = (int) [1, MAX], "
        "num-components = (int) [1, 4], "
        GST_JPEG2000_SAMPLING_LIST ", "
        GST_JPEG2000_COLORSPACE_LIST ", "
        "num-stripes = (int) [2, MAX], stripe-height = (int) [1 , MAX]")
    );
175 
176 #define parent_class gst_openjpeg_enc_parent_class
177 G_DEFINE_TYPE (GstOpenJPEGEnc, gst_openjpeg_enc, GST_TYPE_VIDEO_ENCODER);
178 GST_ELEMENT_REGISTER_DEFINE (openjpegenc, "openjpegenc",
179     GST_RANK_PRIMARY, GST_TYPE_OPENJPEG_ENC);
180 
181 static void
gst_openjpeg_enc_class_init(GstOpenJPEGEncClass * klass)182 gst_openjpeg_enc_class_init (GstOpenJPEGEncClass * klass)
183 {
184   GObjectClass *gobject_class;
185   GstElementClass *element_class;
186   GstVideoEncoderClass *video_encoder_class;
187 
188   gobject_class = (GObjectClass *) klass;
189   element_class = (GstElementClass *) klass;
190   video_encoder_class = (GstVideoEncoderClass *) klass;
191 
192   gobject_class->set_property = gst_openjpeg_enc_set_property;
193   gobject_class->get_property = gst_openjpeg_enc_get_property;
194   gobject_class->finalize = gst_openjpeg_enc_finalize;
195 
196   element_class->change_state =
197       GST_DEBUG_FUNCPTR (gst_openjpeg_enc_change_state);
198 
199   g_object_class_install_property (gobject_class, PROP_NUM_LAYERS,
200       g_param_spec_int ("num-layers", "Number of layers",
201           "Number of layers", 1, 10, DEFAULT_NUM_LAYERS,
202           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
203 
204   g_object_class_install_property (gobject_class, PROP_NUM_RESOLUTIONS,
205       g_param_spec_int ("num-resolutions", "Number of resolutions",
206           "Number of resolutions", 1, 10, DEFAULT_NUM_RESOLUTIONS,
207           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
208 
209   g_object_class_install_property (gobject_class, PROP_PROGRESSION_ORDER,
210       g_param_spec_enum ("progression-order", "Progression Order",
211           "Progression order", GST_OPENJPEG_ENC_TYPE_PROGRESSION_ORDER,
212           DEFAULT_PROGRESSION_ORDER,
213           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
214 
215   g_object_class_install_property (gobject_class, PROP_TILE_OFFSET_X,
216       g_param_spec_int ("tile-offset-x", "Tile Offset X",
217           "Tile Offset X", G_MININT, G_MAXINT, DEFAULT_TILE_OFFSET_X,
218           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
219 
220   g_object_class_install_property (gobject_class, PROP_TILE_OFFSET_Y,
221       g_param_spec_int ("tile-offset-y", "Tile Offset Y",
222           "Tile Offset Y", G_MININT, G_MAXINT, DEFAULT_TILE_OFFSET_Y,
223           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
224 
225   g_object_class_install_property (gobject_class, PROP_TILE_WIDTH,
226       g_param_spec_int ("tile-width", "Tile Width",
227           "Tile Width", 0, G_MAXINT, DEFAULT_TILE_WIDTH,
228           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
229 
230   g_object_class_install_property (gobject_class, PROP_TILE_HEIGHT,
231       g_param_spec_int ("tile-height", "Tile Height",
232           "Tile Height", 0, G_MAXINT, DEFAULT_TILE_HEIGHT,
233           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
234 
235   /**
236    * GstOpenJPEGEnc:num-stripes:
237    *
238    * Number of stripes to use for low latency encoding . (1 = low latency disabled)
239    *
240    * Since: 1.18
241    */
242   g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_NUM_STRIPES,
243       g_param_spec_int ("num-stripes", "Number of stripes",
244           "Number of stripes for low latency encoding. (1 = low latency disabled)",
245           1, G_MAXINT, GST_OPENJPEG_ENC_DEFAULT_NUM_STRIPES,
246           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
247   /**
248    * GstOpenJPEGEnc:num-threads:
249    *
250    * Max number of simultaneous threads to encode stripes, default: encode with streaming thread
251    *
252    * Since: 1.20
253    */
254   g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_NUM_THREADS,
255       g_param_spec_uint ("num-threads", "Number of threads",
256           "Max number of simultaneous threads to encode stripe or frame, default: encode with streaming thread.",
257           0, G_MAXINT, GST_OPENJPEG_ENC_DEFAULT_NUM_THREADS,
258           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
259 
260   gst_element_class_add_static_pad_template (element_class,
261       &gst_openjpeg_enc_src_template);
262   gst_element_class_add_static_pad_template (element_class,
263       &gst_openjpeg_enc_sink_template);
264 
265   gst_element_class_set_static_metadata (element_class,
266       "OpenJPEG JPEG2000 encoder",
267       "Codec/Encoder/Video",
268       "Encode JPEG2000 streams",
269       "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
270 
271   video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_openjpeg_enc_start);
272   video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_openjpeg_enc_stop);
273   video_encoder_class->set_format =
274       GST_DEBUG_FUNCPTR (gst_openjpeg_enc_set_format);
275   video_encoder_class->handle_frame =
276       GST_DEBUG_FUNCPTR (gst_openjpeg_enc_handle_frame);
277   video_encoder_class->propose_allocation = gst_openjpeg_enc_propose_allocation;
278 
279   GST_DEBUG_CATEGORY_INIT (gst_openjpeg_enc_debug, "openjpegenc", 0,
280       "OpenJPEG Encoder");
281 
282   gst_type_mark_as_plugin_api (GST_OPENJPEG_ENC_TYPE_PROGRESSION_ORDER, 0);
283 }
284 
/* Instance initialization: seeds the openjpeg encoder parameters with
 * library defaults, applies the element's property defaults, and sets up
 * the message queue used by the threaded encode path. */
static void
gst_openjpeg_enc_init (GstOpenJPEGEnc * self)
{
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_ENCODER_SINK_PAD (self));

  opj_set_default_encoder_parameters (&self->params);

  /* Select fixed-quality rate control; disable the distortion-based and
   * fixed-layer allocation modes. */
  self->params.cp_fixed_quality = 1;
  self->params.cp_disto_alloc = 0;
  self->params.cp_fixed_alloc = 0;

  /*
   * TODO: Add properties / caps fields for these
   *
   * self->params.csty;
   * self->params.tcp_rates;
   * self->params.tcp_distoratio;
   * self->params.mode;
   * self->params.irreversible;
   * self->params.cp_cinema;
   * self->params.cp_rsiz;
   */

  /* Mirror the property defaults into the openjpeg parameter struct. */
  self->params.tcp_numlayers = DEFAULT_NUM_LAYERS;
  self->params.numresolution = DEFAULT_NUM_RESOLUTIONS;
  self->params.prog_order = DEFAULT_PROGRESSION_ORDER;
  self->params.cp_tx0 = DEFAULT_TILE_OFFSET_X;
  self->params.cp_ty0 = DEFAULT_TILE_OFFSET_Y;
  self->params.cp_tdx = DEFAULT_TILE_WIDTH;
  self->params.cp_tdy = DEFAULT_TILE_HEIGHT;
  /* Tiling is only active once both tile dimensions are non-zero. */
  self->params.tile_size_on = (self->params.cp_tdx != 0
      && self->params.cp_tdy != 0);

  self->num_stripes = GST_OPENJPEG_ENC_DEFAULT_NUM_STRIPES;
  g_cond_init (&self->messages_cond);
  g_queue_init (&self->messages);

  self->available_threads = GST_OPENJPEG_ENC_DEFAULT_NUM_THREADS;
}
324 
/* GObject property setter.  Most values are written straight into the
 * openjpeg parameter struct; tile_size_on is recomputed whenever either
 * tile dimension changes. */
static void
gst_openjpeg_enc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (object);

  switch (prop_id) {
    case PROP_NUM_LAYERS:
      self->params.tcp_numlayers = g_value_get_int (value);
      break;
    case PROP_NUM_RESOLUTIONS:
      self->params.numresolution = g_value_get_int (value);
      break;
    case PROP_PROGRESSION_ORDER:
      self->params.prog_order = g_value_get_enum (value);
      break;
    case PROP_TILE_OFFSET_X:
      self->params.cp_tx0 = g_value_get_int (value);
      break;
    case PROP_TILE_OFFSET_Y:
      self->params.cp_ty0 = g_value_get_int (value);
      break;
    case PROP_TILE_WIDTH:
      self->params.cp_tdx = g_value_get_int (value);
      /* tiling only kicks in with both dimensions set */
      self->params.tile_size_on = (self->params.cp_tdx != 0
          && self->params.cp_tdy != 0);
      break;
    case PROP_TILE_HEIGHT:
      self->params.cp_tdy = g_value_get_int (value);
      self->params.tile_size_on = (self->params.cp_tdx != 0
          && self->params.cp_tdy != 0);
      break;
    case PROP_NUM_STRIPES:
      self->num_stripes = g_value_get_int (value);
      break;
    case PROP_NUM_THREADS:
      self->available_threads = g_value_get_uint (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
368 
/* GObject property getter; reads back the values stored by
 * gst_openjpeg_enc_set_property. */
static void
gst_openjpeg_enc_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (object);

  switch (prop_id) {
    case PROP_NUM_LAYERS:
      g_value_set_int (value, self->params.tcp_numlayers);
      break;
    case PROP_NUM_RESOLUTIONS:
      g_value_set_int (value, self->params.numresolution);
      break;
    case PROP_PROGRESSION_ORDER:
      g_value_set_enum (value, self->params.prog_order);
      break;
    case PROP_TILE_OFFSET_X:
      g_value_set_int (value, self->params.cp_tx0);
      break;
    case PROP_TILE_OFFSET_Y:
      g_value_set_int (value, self->params.cp_ty0);
      break;
    case PROP_TILE_WIDTH:
      g_value_set_int (value, self->params.cp_tdx);
      break;
    case PROP_TILE_HEIGHT:
      g_value_set_int (value, self->params.cp_tdy);
      break;
    case PROP_NUM_STRIPES:
      g_value_set_int (value, self->num_stripes);
      break;
    case PROP_NUM_THREADS:
      g_value_set_uint (value, self->available_threads);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
408 
409 static gboolean
gst_openjpeg_enc_start(GstVideoEncoder * encoder)410 gst_openjpeg_enc_start (GstVideoEncoder * encoder)
411 {
412   GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
413 
414   GST_DEBUG_OBJECT (self, "Starting");
415   if (self->available_threads)
416     self->encode_frame = gst_openjpeg_enc_encode_frame_multiple;
417   else
418     self->encode_frame = gst_openjpeg_enc_encode_frame_single;
419 
420   return TRUE;
421 }
422 
/* GstVideoEncoder::stop — release the cached input/output codec states. */
static gboolean
gst_openjpeg_enc_stop (GstVideoEncoder * video_encoder)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (video_encoder);

  GST_DEBUG_OBJECT (self, "Stopping");

  if (self->output_state) {
    gst_video_codec_state_unref (self->output_state);
    self->output_state = NULL;
  }

  if (self->input_state) {
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  GST_DEBUG_OBJECT (self, "Stopped");

  return TRUE;
}
444 
445 static void
gst_openjpeg_enc_flush_messages(GstOpenJPEGEnc * self)446 gst_openjpeg_enc_flush_messages (GstOpenJPEGEnc * self)
447 {
448   GstOpenJPEGCodecMessage *enc_params;
449 
450   GST_OBJECT_LOCK (self);
451   while ((enc_params = g_queue_pop_head (&self->messages))) {
452     gst_openjpeg_encode_message_free (enc_params);
453   }
454   g_cond_broadcast (&self->messages_cond);
455   GST_OBJECT_UNLOCK (self);
456 }
457 
/* GObject finalize — drop any queued encode messages and clear the
 * condition variable initialized in _init, then chain up. */
static void
gst_openjpeg_enc_finalize (GObject * object)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (object);

  gst_openjpeg_enc_flush_messages (self);
  g_cond_clear (&self->messages_cond);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
468 
469 static GstStateChangeReturn
gst_openjpeg_enc_change_state(GstElement * element,GstStateChange transition)470 gst_openjpeg_enc_change_state (GstElement * element, GstStateChange transition)
471 {
472   GstOpenJPEGEnc *self;
473 
474   g_return_val_if_fail (GST_IS_OPENJPEG_ENC (element),
475       GST_STATE_CHANGE_FAILURE);
476   self = GST_OPENJPEG_ENC (element);
477 
478   switch (transition) {
479     case GST_STATE_CHANGE_NULL_TO_READY:
480       break;
481     case GST_STATE_CHANGE_READY_TO_PAUSED:
482       break;
483     case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
484       break;
485     case GST_STATE_CHANGE_PAUSED_TO_READY:
486       gst_openjpeg_enc_flush_messages (self);
487       break;
488     default:
489       break;
490   }
491 
492   return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
493 }
494 
495 static guint
get_stripe_height(GstOpenJPEGEnc * self,guint slice_num,guint frame_height)496 get_stripe_height (GstOpenJPEGEnc * self, guint slice_num, guint frame_height)
497 {
498   guint nominal_stripe_height = frame_height / self->num_stripes;
499   return (slice_num <
500       self->num_stripes -
501       1) ? nominal_stripe_height : frame_height -
502       (slice_num * nominal_stripe_height);
503 }
504 
/* Copies one stripe of a packed 16-bit 4-component frame (ARGB64/AYUV64)
 * into the planar openjpeg image.  The first packed word (alpha) is
 * routed to the last image component so alpha is encoded last.
 * image->y0/y1 select the stripe within the full frame. */
static void
fill_image_packed16_4 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint16 *data_in, *tmp;
  gint *data_out[4];
  gint sstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = image->y1 - image->y0;    /* stripe height */
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;        /* stride in 16-bit units */
  data_in =
      (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0) + image->y0 * sstride;

  data_out[0] = image->comps[0].data;
  data_out[1] = image->comps[1].data;
  data_out[2] = image->comps[2].data;
  data_out[3] = image->comps[3].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;

    for (x = 0; x < w; x++) {
      /* packed A,C0,C1,C2 -> planar C0,C1,C2,A */
      *data_out[3] = tmp[0];
      *data_out[0] = tmp[1];
      *data_out[1] = tmp[2];
      *data_out[2] = tmp[3];

      tmp += 4;
      data_out[0]++;
      data_out[1]++;
      data_out[2]++;
      data_out[3]++;
    }
    data_in += sstride;
  }
}
542 
/* Copies one stripe of a packed 8-bit 4-component frame (ARGB/AYUV)
 * into the planar openjpeg image; the leading alpha byte is written to
 * the last image component (alpha encoded last).  image->y0/y1 select
 * the stripe within the full frame. */
static void
fill_image_packed8_4 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out[4];
  gint sstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = image->y1 - image->y0;    /* stripe height */
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  data_in =
      (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0) + image->y0 * sstride;

  data_out[0] = image->comps[0].data;
  data_out[1] = image->comps[1].data;
  data_out[2] = image->comps[2].data;
  data_out[3] = image->comps[3].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;

    for (x = 0; x < w; x++) {
      /* packed A,C0,C1,C2 -> planar C0,C1,C2,A */
      *data_out[3] = tmp[0];
      *data_out[0] = tmp[1];
      *data_out[1] = tmp[2];
      *data_out[2] = tmp[3];

      tmp += 4;
      data_out[0]++;
      data_out[1]++;
      data_out[2]++;
      data_out[3]++;
    }
    data_in += sstride;
  }
}
580 
/* Copies one stripe of a packed 8-bit frame with an ignored padding byte
 * (xRGB) into a 3-component planar openjpeg image; tmp[0] (the x byte)
 * is skipped.  image->y0/y1 select the stripe within the full frame. */
static void
fill_image_packed8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out[3];
  gint sstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = image->y1 - image->y0;    /* stripe height */
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  data_in =
      (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0) + image->y0 * sstride;

  data_out[0] = image->comps[0].data;
  data_out[1] = image->comps[1].data;
  data_out[2] = image->comps[2].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;

    for (x = 0; x < w; x++) {
      /* tmp[0] is padding (x); copy the three color bytes */
      *data_out[0] = tmp[1];
      *data_out[1] = tmp[2];
      *data_out[2] = tmp[3];

      tmp += 4;
      data_out[0]++;
      data_out[1]++;
      data_out[2]++;
    }
    data_in += sstride;
  }
}
615 
/* Copies one stripe of a planar 16-bit 3-component frame (10-bit YUV in
 * 16-bit containers) plane-by-plane into the openjpeg image.  The stripe
 * offset image->y0 is scaled by each component's vertical subsampling
 * factor (comp->dy); comp->h is the per-component stripe height. */
static void
fill_image_planar16_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint c, x, y, w, h;
  const guint16 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  for (c = 0; c < 3; c++) {
    opj_image_comp_t *comp = image->comps + c;

    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    h = comp->h;
    sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c) / 2;      /* 16-bit units */
    data_in =
        (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame,
        c) + (image->y0 / comp->dy) * sstride;
    data_out = comp->data;

    for (y = 0; y < h; y++) {
      tmp = data_in;
      for (x = 0; x < w; x++) {
        *data_out = *tmp;
        data_out++;
        tmp++;
      }
      data_in += sstride;
    }
  }
}
646 
/* Copies one stripe of a planar 8-bit 3-component frame (Y444/Y42B/I420/
 * Y41B/YUV9) plane-by-plane into the openjpeg image.  The stripe offset
 * image->y0 is scaled by each component's vertical subsampling factor
 * (comp->dy); comp->h is the per-component stripe height. */
static void
fill_image_planar8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint c, x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  for (c = 0; c < 3; c++) {
    opj_image_comp_t *comp = image->comps + c;

    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    h = comp->h;
    sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c);
    data_in =
        (guint8 *) GST_VIDEO_FRAME_COMP_DATA (frame,
        c) + (image->y0 / comp->dy) * sstride;
    data_out = comp->data;

    for (y = 0; y < h; y++) {
      tmp = data_in;
      for (x = 0; x < w; x++) {
        *data_out = *tmp;
        data_out++;
        tmp++;
      }
      data_in += sstride;
    }
  }
}
677 
/* Copies one stripe of a single-plane 8-bit frame (GRAY8) into the sole
 * openjpeg image component.  image->y0 (scaled by comp->dy) selects the
 * stripe; comp->h is the stripe height. */
static void
fill_image_planar8_1 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out;
  gint sstride;
  opj_image_comp_t *comp = image->comps;

  w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  h = comp->h;
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  data_in =
      (guint8 *) GST_VIDEO_FRAME_COMP_DATA (frame,
      0) + (image->y0 / comp->dy) * sstride;
  data_out = image->comps[0].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;
    for (x = 0; x < w; x++) {
      *data_out = *tmp;
      data_out++;
      tmp++;
    }
    data_in += sstride;
  }
}
705 
/* Copies one stripe of a single-plane 16-bit frame (GRAY16_LE/BE) into
 * the sole openjpeg image component.  image->y0 (scaled by comp->dy)
 * selects the stripe; comp->h is the stripe height. */
static void
fill_image_planar16_1 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint16 *data_in, *tmp;
  gint *data_out;
  gint sstride;
  opj_image_comp_t *comp = image->comps;

  w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  h = comp->h;
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;        /* 16-bit units */
  data_in =
      (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame,
      0) + (image->y0 / comp->dy) * sstride;
  data_out = comp->data;

  for (y = 0; y < h; y++) {
    tmp = data_in;
    for (x = 0; x < w; x++) {
      *data_out = *tmp;
      data_out++;
      tmp++;
    }
    data_in += sstride;
  }
}
733 
/* GstVideoEncoder::set_format — negotiates the output format.
 *
 * Chooses the container/codestream flavor from downstream caps (forcing
 * image/x-jpc-striped when stripe mode is enabled), selects the matching
 * fill_image_* converter and component count for the input pixel format,
 * derives the JPEG 2000 sampling and colorspace fields, builds the
 * output caps and sets the output state.  Returns FALSE when stripe
 * output is not accepted downstream. */
static gboolean
gst_openjpeg_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstCaps *allowed_caps, *caps;
  GstStructure *s;
  const gchar *colorspace = NULL;
  GstJPEG2000Sampling sampling = GST_JPEG2000_SAMPLING_NONE;
  gint ncomps;
  /* any value other than the default of 1 enables striped output */
  gboolean stripe_mode =
      self->num_stripes != GST_OPENJPEG_ENC_DEFAULT_NUM_STRIPES;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  if (stripe_mode) {
    /* Stripe mode only produces image/x-jpc-striped; check the peer
     * actually accepts it before committing. */
    GstCaps *template_caps = gst_caps_new_empty_simple ("image/x-jpc-striped");
    GstCaps *my_caps;

    my_caps = gst_pad_query_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder),
        template_caps);
    gst_caps_unref (template_caps);

    allowed_caps = gst_pad_peer_query_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder),
        my_caps);
    gst_caps_unref (my_caps);

    if (gst_caps_is_empty (allowed_caps)) {
      gst_caps_unref (allowed_caps);
      GST_WARNING_OBJECT (self, "Striped JPEG 2000 not accepted downstream");
      return FALSE;
    }

    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = FALSE;
    allowed_caps = gst_caps_truncate (allowed_caps);
    s = gst_caps_get_structure (allowed_caps, 0);
  } else {
    /* Pick the first structure downstream allows and map its media type
     * to the openjpeg codec format / jp2c framing flag.
     * NOTE(review): gst_pad_get_allowed_caps can return NULL when the
     * pad is unlinked — presumably never hit here; confirm. */
    allowed_caps =
        gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
    allowed_caps = gst_caps_truncate (allowed_caps);

    s = gst_caps_get_structure (allowed_caps, 0);
    if (gst_structure_has_name (s, "image/jp2")) {
      self->codec_format = OPJ_CODEC_JP2;
      self->is_jp2c = FALSE;
    } else if (gst_structure_has_name (s, "image/x-j2c")) {
      self->codec_format = OPJ_CODEC_J2K;
      self->is_jp2c = TRUE;
    } else if (gst_structure_has_name (s, "image/x-jpc")) {
      self->codec_format = OPJ_CODEC_J2K;
      self->is_jp2c = FALSE;
    } else {
      g_return_val_if_reached (FALSE);
    }
  }

  /* Select the raw-frame -> opj_image_t converter and component count
   * for the negotiated input format. */
  switch (state->info.finfo->format) {
    case GST_VIDEO_FORMAT_ARGB64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_AYUV:
      self->fill_image = fill_image_packed8_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_xRGB:
      self->fill_image = fill_image_packed8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_AYUV64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
      self->fill_image = fill_image_planar16_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_YUV9:
      self->fill_image = fill_image_planar8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      self->fill_image = fill_image_planar8_1;
      ncomps = 1;
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      self->fill_image = fill_image_planar16_1;
      ncomps = 1;
      break;
    default:
      /* sink caps guarantee one of the formats above */
      g_assert_not_reached ();
  }

  /* sampling */
  /* note: encoder re-orders channels so that alpha channel is encoded as the last channel */
  switch (state->info.finfo->format) {
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_ARGB:
      sampling = GST_JPEG2000_SAMPLING_RGBA;
      break;
    case GST_VIDEO_FORMAT_AYUV64:
    case GST_VIDEO_FORMAT_AYUV:
      sampling = GST_JPEG2000_SAMPLING_YBRA4444_EXT;
      break;
    case GST_VIDEO_FORMAT_xRGB:
      sampling = GST_JPEG2000_SAMPLING_RGB;
      break;
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_Y444:
      sampling = GST_JPEG2000_SAMPLING_YBR444;
      break;

    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_Y42B:
      sampling = GST_JPEG2000_SAMPLING_YBR422;
      break;
    case GST_VIDEO_FORMAT_YUV9:
      sampling = GST_JPEG2000_SAMPLING_YBR410;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      sampling = GST_JPEG2000_SAMPLING_YBR411;
      break;
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
    case GST_VIDEO_FORMAT_I420:
      sampling = GST_JPEG2000_SAMPLING_YBR420;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      sampling = GST_JPEG2000_SAMPLING_GRAYSCALE;
      break;
    default:
      break;
  }

  /* Derive the caps "colorspace" field from the format flags. */
  if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV)) {
    colorspace = "sYUV";
  } else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB)) {
    colorspace = "sRGB";
  } else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY)) {
    colorspace = "GRAY";
  } else
    g_return_val_if_reached (FALSE);

  if (stripe_mode) {
    /* Striped output advertises stripe count and the (nominal) height of
     * the first stripe. */
    caps = gst_caps_new_simple ("image/x-jpc-striped",
        "colorspace", G_TYPE_STRING, colorspace,
        "sampling", G_TYPE_STRING, gst_jpeg2000_sampling_to_string (sampling),
        "num-components", G_TYPE_INT, ncomps,
        "num-stripes", G_TYPE_INT, self->num_stripes,
        "stripe-height", G_TYPE_INT,
        get_stripe_height (self, 0,
            GST_VIDEO_INFO_COMP_HEIGHT (&state->info, 0)), NULL);
  } else if (sampling != GST_JPEG2000_SAMPLING_NONE) {
    caps = gst_caps_new_simple (gst_structure_get_name (s),
        "colorspace", G_TYPE_STRING, colorspace,
        "sampling", G_TYPE_STRING, gst_jpeg2000_sampling_to_string (sampling),
        "num-components", G_TYPE_INT, ncomps, NULL);
  } else {
    caps = gst_caps_new_simple (gst_structure_get_name (s),
        "colorspace", G_TYPE_STRING, colorspace,
        "num-components", G_TYPE_INT, ncomps, NULL);
  }
  gst_caps_unref (allowed_caps);

  if (self->output_state)
    gst_video_codec_state_unref (self->output_state);
  self->output_state =
      gst_video_encoder_set_output_state (encoder, caps, state);

  gst_video_encoder_negotiate (GST_VIDEO_ENCODER (encoder));

  return TRUE;
}
927 
928 static opj_image_t *
gst_openjpeg_enc_fill_image(GstOpenJPEGEnc * self,GstVideoFrame * frame,guint slice_num)929 gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame,
930     guint slice_num)
931 {
932   gint i, ncomps, temp, min_height = INT_MAX;
933   opj_image_cmptparm_t *comps;
934   OPJ_COLOR_SPACE colorspace;
935   opj_image_t *image;
936 
937   ncomps = GST_VIDEO_FRAME_N_COMPONENTS (frame);
938   comps = g_new0 (opj_image_cmptparm_t, ncomps);
939 
940   for (i = 0; i < ncomps; i++) {
941     comps[i].prec = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
942     comps[i].bpp = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
943     comps[i].sgnd = 0;
944     comps[i].w = GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
945     comps[i].dx =
946         (guint) ((float) GST_VIDEO_FRAME_WIDTH (frame) /
947         GST_VIDEO_FRAME_COMP_WIDTH (frame, i) + 0.5f);
948     comps[i].dy =
949         (guint) ((float) GST_VIDEO_FRAME_HEIGHT (frame) /
950         GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) + 0.5f);
951     temp =
952         (GST_VIDEO_FRAME_COMP_HEIGHT (frame,
953             i) / self->num_stripes) * comps[i].dy;
954     if (temp < min_height)
955       min_height = temp;
956   }
957 
958   for (i = 0; i < ncomps; i++) {
959     gint nominal_height = min_height / comps[i].dy;
960 
961     comps[i].h = (slice_num < self->num_stripes) ?
962         nominal_height
963         : GST_VIDEO_FRAME_COMP_HEIGHT (frame,
964         i) - (self->num_stripes - 1) * nominal_height;
965 
966   }
967 
968   if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV))
969     colorspace = OPJ_CLRSPC_SYCC;
970   else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB))
971     colorspace = OPJ_CLRSPC_SRGB;
972   else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY))
973     colorspace = OPJ_CLRSPC_GRAY;
974   else
975     g_return_val_if_reached (NULL);
976 
977   image = opj_image_create (ncomps, comps, colorspace);
978   if (!image) {
979     GST_WARNING_OBJECT (self,
980         "Unable to create a JPEG image. first component height=%d",
981         ncomps ? comps[0].h : 0);
982     return NULL;
983   }
984 
985   g_free (comps);
986 
987   image->x0 = 0;
988   image->x1 = GST_VIDEO_FRAME_WIDTH (frame);
989   image->y0 = (slice_num - 1) * min_height;
990   image->y1 =
991       (slice_num <
992       self->num_stripes) ? image->y0 +
993       min_height : GST_VIDEO_FRAME_HEIGHT (frame);
994   self->fill_image (image, frame);
995 
996   return image;
997 }
998 
999 static void
gst_openjpeg_enc_opj_error(const char * msg,void * userdata)1000 gst_openjpeg_enc_opj_error (const char *msg, void *userdata)
1001 {
1002   GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (userdata);
1003   gchar *trimmed = g_strchomp (g_strdup (msg));
1004   GST_TRACE_OBJECT (self, "openjpeg error: %s", trimmed);
1005   g_free (trimmed);
1006 }
1007 
1008 static void
gst_openjpeg_enc_opj_warning(const char * msg,void * userdata)1009 gst_openjpeg_enc_opj_warning (const char *msg, void *userdata)
1010 {
1011   GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (userdata);
1012   gchar *trimmed = g_strchomp (g_strdup (msg));
1013   GST_TRACE_OBJECT (self, "openjpeg warning: %s", trimmed);
1014   g_free (trimmed);
1015 }
1016 
1017 static void
gst_openjpeg_enc_opj_info(const char * msg,void * userdata)1018 gst_openjpeg_enc_opj_info (const char *msg, void *userdata)
1019 {
1020   GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (userdata);
1021   gchar *trimmed = g_strchomp (g_strdup (msg));
1022   GST_TRACE_OBJECT (self, "openjpeg info: %s", trimmed);
1023   g_free (trimmed);
1024 }
1025 
/* Grow-on-demand in-memory output stream handed to libopenjpeg through the
 * custom stream callbacks below (write_fn/skip_fn/seek_fn). */
typedef struct
{
  guint8 *data;                 /* heap buffer; ownership is transferred to the output GstBuffer on success */
  guint allocsize;              /* currently allocated size of @data */
  guint offset;                 /* current write/seek position within @data */
  guint size;                   /* high-water mark: number of valid bytes in @data */
} MemStream;
1033 
/* Read callback: the encoder stream is write-only, so reaching this is a
 * programming error.  Returns -1, i.e. (OPJ_SIZE_T)-1, which is
 * OpenJPEG's stream-failure value. */
static OPJ_SIZE_T
read_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
{
  g_return_val_if_reached (-1);
}
1039 
1040 static OPJ_SIZE_T
write_fn(void * p_buffer,OPJ_SIZE_T p_nb_bytes,void * p_user_data)1041 write_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
1042 {
1043   MemStream *mstream = p_user_data;
1044 
1045   if (mstream->offset + p_nb_bytes > mstream->allocsize) {
1046     while (mstream->offset + p_nb_bytes > mstream->allocsize)
1047       mstream->allocsize *= 2;
1048     mstream->data = g_realloc (mstream->data, mstream->allocsize);
1049   }
1050 
1051   memcpy (mstream->data + mstream->offset, p_buffer, p_nb_bytes);
1052 
1053   if (mstream->offset + p_nb_bytes > mstream->size)
1054     mstream->size = mstream->offset + p_nb_bytes;
1055   mstream->offset += p_nb_bytes;
1056 
1057   return p_nb_bytes;
1058 }
1059 
/* Skip callback: advance the write position by @p_nb_bytes, growing the
 * allocation and the logical size so the skipped region lies inside the
 * buffer.
 * NOTE(review): the skipped bytes are left uninitialized, and a negative
 * @p_nb_bytes would wrap the unsigned offset — presumably libopenjpeg
 * always skips forward over regions it later rewrites via seek_fn +
 * write_fn; confirm against the OpenJPEG stream contract. */
static OPJ_OFF_T
skip_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;

  if (mstream->offset + p_nb_bytes > mstream->allocsize) {
    while (mstream->offset + p_nb_bytes > mstream->allocsize)
      mstream->allocsize *= 2;
    mstream->data = g_realloc (mstream->data, mstream->allocsize);
  }

  if (mstream->offset + p_nb_bytes > mstream->size)
    mstream->size = mstream->offset + p_nb_bytes;

  mstream->offset += p_nb_bytes;

  return p_nb_bytes;
}
1078 
1079 static OPJ_BOOL
seek_fn(OPJ_OFF_T p_nb_bytes,void * p_user_data)1080 seek_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
1081 {
1082   MemStream *mstream = p_user_data;
1083 
1084   if (p_nb_bytes > mstream->size)
1085     return OPJ_FALSE;
1086 
1087   mstream->offset = p_nb_bytes;
1088 
1089   return OPJ_TRUE;
1090 }
1091 
1092 static gboolean
gst_openjpeg_encode_is_last_subframe(GstVideoEncoder * enc,int stripe)1093 gst_openjpeg_encode_is_last_subframe (GstVideoEncoder * enc, int stripe)
1094 {
1095   GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (enc);
1096 
1097   return (stripe == self->num_stripes);
1098 }
1099 
1100 static GstOpenJPEGCodecMessage *
gst_openjpeg_encode_message_new(GstOpenJPEGEnc * self,GstVideoCodecFrame * frame,int num_stripe)1101 gst_openjpeg_encode_message_new (GstOpenJPEGEnc * self,
1102     GstVideoCodecFrame * frame, int num_stripe)
1103 {
1104   GstOpenJPEGCodecMessage *message = g_slice_new0 (GstOpenJPEGCodecMessage);
1105 
1106   message->frame = gst_video_codec_frame_ref (frame);
1107   message->stripe = num_stripe;
1108   message->last_error = OPENJPEG_ERROR_NONE;
1109 
1110   return message;
1111 }
1112 
1113 static GstOpenJPEGCodecMessage *
gst_openjpeg_encode_message_free(GstOpenJPEGCodecMessage * message)1114 gst_openjpeg_encode_message_free (GstOpenJPEGCodecMessage * message)
1115 {
1116   if (message) {
1117     gst_video_codec_frame_unref (message->frame);
1118     if (message->output_buffer)
1119       gst_buffer_unref (message->output_buffer);
1120     g_slice_free (GstOpenJPEGCodecMessage, message);
1121   }
1122   return NULL;
1123 }
1124 
/* Record @err_code on @encode_params and jump to the enclosing function's
 * `done:` label.  Wrapped in do/while(0) so the macro behaves as exactly
 * one statement inside unbraced if/else chains. */
#define ENCODE_ERROR(encode_params, err_code) do { \
      encode_params->last_error = err_code; \
      goto done; \
} while (0)
1129 
/* Encode one stripe of a frame.  Invoked either directly on the streaming
 * thread (message->direct, single-threaded path) or asynchronously from a
 * gst_element_call_async() worker; in the async case the finished message
 * is pushed onto self->messages and the waiting streaming thread is
 * signalled.  Errors are reported through message->last_error. */
static void
gst_openjpeg_enc_encode_stripe (GstElement * element, gpointer user_data)
{
  GstOpenJPEGCodecMessage *message = (GstOpenJPEGCodecMessage *) user_data;
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (element);
  opj_codec_t *enc = NULL;
  opj_stream_t *stream = NULL;
  MemStream mstream;
  opj_image_t *image = NULL;
  GstVideoFrame vframe;

  GST_INFO_OBJECT (self, "Encode stripe %d/%d", message->stripe,
      self->num_stripes);

  /* NULL so the error path knows nothing was allocated yet */
  mstream.data = NULL;
  enc = opj_create_compress (self->codec_format);
  if (!enc)
    ENCODE_ERROR (message, OPENJPEG_ERROR_INIT);

  /* Hook the OpenJPEG log handlers up only when TRACE logging is active;
   * otherwise silence the library entirely */
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (enc, gst_openjpeg_enc_opj_info, self);
    opj_set_warning_handler (enc, gst_openjpeg_enc_opj_warning, self);
    opj_set_error_handler (enc, gst_openjpeg_enc_opj_error, self);
  } else {
    opj_set_info_handler (enc, NULL, NULL);
    opj_set_warning_handler (enc, NULL, NULL);
    opj_set_error_handler (enc, NULL, NULL);
  }
  if (!gst_video_frame_map (&vframe, &self->input_state->info,
          message->frame->input_buffer, GST_MAP_READ))
    ENCODE_ERROR (message, OPENJPEG_ERROR_MAP_READ);
  image = gst_openjpeg_enc_fill_image (self, &vframe, message->stripe);
  gst_video_frame_unmap (&vframe);
  if (!image)
    ENCODE_ERROR (message, OPENJPEG_ERROR_FILL_IMAGE);

  /* Multi-component transform only applies to RGB input.
   * NOTE(review): vframe.info is read after gst_video_frame_unmap(); the
   * stack-held info struct itself stays valid, only the mapped pixel data
   * does not — confirm this is intentional. */
  if (vframe.info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB) {
    self->params.tcp_mct = 1;
  }
  opj_setup_encoder (enc, &self->params, image);
  stream = opj_stream_create (4096, OPJ_FALSE);
  if (!stream)
    ENCODE_ERROR (message, OPENJPEG_ERROR_OPEN);

  /* In-memory output stream; grows on demand in write_fn/skip_fn */
  mstream.allocsize = 4096;
  mstream.data = g_malloc (mstream.allocsize);
  mstream.offset = 0;
  mstream.size = 0;

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream, NULL);
  opj_stream_set_user_data_length (stream, mstream.size);

  if (!opj_start_compress (enc, image, stream))
    ENCODE_ERROR (message, OPENJPEG_ERROR_ENCODE);

  if (!opj_encode (enc, stream))
    ENCODE_ERROR (message, OPENJPEG_ERROR_ENCODE);

  if (!opj_end_compress (enc, stream))
    ENCODE_ERROR (message, OPENJPEG_ERROR_ENCODE);

  opj_image_destroy (image);
  image = NULL;
  opj_stream_destroy (stream);
  stream = NULL;
  opj_destroy_codec (enc);
  enc = NULL;

  message->output_buffer = gst_buffer_new ();

  /* For JP2 file output, prepend an 8-byte 'jp2c' codestream box header
   * (32-bit big-endian box length, then the fourcc) */
  if (self->is_jp2c) {
    GstMapInfo map;
    GstMemory *mem;

    mem = gst_allocator_alloc (NULL, 8, NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, mstream.size + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (message->output_buffer, mem);
  }

  /* Ownership of mstream.data transfers to the wrapped memory, which frees
   * it with g_free when the buffer is released */
  gst_buffer_append_memory (message->output_buffer,
      gst_memory_new_wrapped (0, mstream.data, mstream.allocsize, 0,
          mstream.size, mstream.data, (GDestroyNotify) g_free));
  message->last_error = OPENJPEG_ERROR_NONE;

  GST_INFO_OBJECT (self,
      "Stripe %d encoded successfully, pass it to the streaming thread",
      message->stripe);

done:
  /* On error, release whatever was allocated before the failure point;
   * on success everything was already destroyed or handed off above */
  if (message->last_error != OPENJPEG_ERROR_NONE) {
    if (mstream.data)
      g_free (mstream.data);
    if (enc)
      opj_destroy_codec (enc);
    if (image)
      opj_image_destroy (image);
    if (stream)
      opj_stream_destroy (stream);
  }
  /* Async path: hand the message back to the streaming thread */
  if (!message->direct) {
    GST_OBJECT_LOCK (self);
    g_queue_push_tail (&self->messages, message);
    g_cond_signal (&self->messages_cond);
    GST_OBJECT_UNLOCK (self);
  }

}
1246 
/* Block the streaming thread until a worker has pushed an encode result
 * onto self->messages, then pop and return it.  Must be called WITHOUT
 * the object lock held; the condition is re-checked in a loop to cope
 * with spurious wakeups. */
static GstOpenJPEGCodecMessage *
gst_openjpeg_enc_wait_for_new_message (GstOpenJPEGEnc * self)
{
  GstOpenJPEGCodecMessage *message = NULL;

  GST_OBJECT_LOCK (self);
  while (g_queue_is_empty (&self->messages))
    g_cond_wait (&self->messages_cond, GST_OBJECT_GET_LOCK (self));
  message = g_queue_pop_head (&self->messages);
  GST_OBJECT_UNLOCK (self);

  return message;
}
1260 
/* Multi-threaded frame encoding: split the frame into self->num_stripes
 * stripes, dispatch up to self->available_threads stripe encodes at a
 * time via gst_element_call_async(), then collect results from the
 * message queue and push each stripe downstream as a subframe (the last
 * stripe finishes the frame).  Returns GST_FLOW_OK on success; on error
 * self->last_error is set for gst_openjpeg_enc_handle_frame() to map to
 * an element error. */
static GstFlowReturn
gst_openjpeg_enc_encode_frame_multiple (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;
  guint encoded_stripes = 0;
  guint enqueued_stripes = 0;
  GstOpenJPEGCodecMessage *message = NULL;

  /* The frame is split into num_stripes stripes, each encoded on a worker
   * thread.  Since the number of stripes can exceed the available threads,
   * two nested loops are used: the outer one counts fully encoded stripes,
   * the inner ones enqueue work and then drain the result queue. */
  while (encoded_stripes < self->num_stripes) {
    for (i = 1;
        i <= self->available_threads
        && enqueued_stripes < (self->num_stripes - encoded_stripes); i++) {
      message =
          gst_openjpeg_encode_message_new (self, frame, i + encoded_stripes);
      GST_LOG_OBJECT (self,
          "About to enqueue an encoding message from frame %p stripe %d", frame,
          message->stripe);
      gst_element_call_async (GST_ELEMENT (self),
          (GstElementCallAsyncFunc) gst_openjpeg_enc_encode_stripe, message,
          NULL);
      enqueued_stripes++;
    }
    while (enqueued_stripes > 0) {
      /* Blocks until a worker delivers a result */
      message = gst_openjpeg_enc_wait_for_new_message (self);
      if (!message)
        continue;
      enqueued_stripes--;
      if (message->last_error == OPENJPEG_ERROR_NONE) {
        GST_LOG_OBJECT (self,
            "About to push frame %p stripe %d", frame, message->stripe);
        frame->output_buffer = gst_buffer_ref (message->output_buffer);
        if (gst_openjpeg_encode_is_last_subframe (encoder, encoded_stripes + 1)) {
          GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
          ret = gst_video_encoder_finish_frame (encoder, frame);
        } else
          ret = gst_video_encoder_finish_subframe (encoder, frame);
        if (ret != GST_FLOW_OK) {
          GST_WARNING_OBJECT
              (self, "An error occurred pushing the frame %s",
              gst_flow_get_name (ret));
          goto done;
        }
        encoded_stripes++;
        message = gst_openjpeg_encode_message_free (message);
      } else {
        GST_WARNING_OBJECT
            (self, "An error occurred %d during the JPEG encoding",
            message->last_error);
        gst_video_codec_frame_unref (frame);
        self->last_error = message->last_error;
        ret = GST_FLOW_ERROR;
        goto done;
      }
    }
  }

done:
  /* NULL-safe: message is NULL here on the fully successful path */
  gst_openjpeg_encode_message_free (message);
  return ret;
}
1331 
1332 static GstFlowReturn
gst_openjpeg_enc_encode_frame_single(GstVideoEncoder * encoder,GstVideoCodecFrame * frame)1333 gst_openjpeg_enc_encode_frame_single (GstVideoEncoder * encoder,
1334     GstVideoCodecFrame * frame)
1335 {
1336   GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
1337   GstFlowReturn ret = GST_FLOW_OK;
1338   guint i;
1339   GstOpenJPEGCodecMessage *message = NULL;
1340 
1341   for (i = 1; i <= self->num_stripes; ++i) {
1342     message = gst_openjpeg_encode_message_new (self, frame, i);
1343     message->direct = TRUE;
1344     gst_openjpeg_enc_encode_stripe (GST_ELEMENT (self), message);
1345     if (message->last_error != OPENJPEG_ERROR_NONE) {
1346       GST_WARNING_OBJECT
1347           (self, "An error occured %d during the JPEG encoding",
1348           message->last_error);
1349       gst_video_codec_frame_unref (frame);
1350       self->last_error = message->last_error;
1351       ret = GST_FLOW_ERROR;
1352       goto done;
1353     }
1354     frame->output_buffer = gst_buffer_ref (message->output_buffer);
1355     if (gst_openjpeg_encode_is_last_subframe (encoder, message->stripe)) {
1356       GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
1357       ret = gst_video_encoder_finish_frame (encoder, frame);
1358     } else
1359       ret = gst_video_encoder_finish_subframe (encoder, frame);
1360     if (ret != GST_FLOW_OK) {
1361       GST_WARNING_OBJECT
1362           (self, "An error occurred pushing the frame %s",
1363           gst_flow_get_name (ret));
1364       goto done;
1365     }
1366     message = gst_openjpeg_encode_message_free (message);
1367   }
1368 
1369 done:
1370   gst_openjpeg_encode_message_free (message);
1371   return ret;
1372 }
1373 
/* GstVideoEncoder::handle_frame vfunc.  In subframe (striped) mode, first
 * clamp the number of wavelet resolutions to what the smallest stripe can
 * support, then delegate to the configured encode_frame implementation
 * (single- or multi-threaded).  Maps self->last_error to an element error
 * on failure. */
static GstFlowReturn
gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoFrame vframe;
  /* Striped encoding is active whenever num-stripes differs from the
   * default (whole-frame) value */
  gboolean subframe_mode =
      self->num_stripes != GST_OPENJPEG_ENC_DEFAULT_NUM_STRIPES;

  GST_DEBUG_OBJECT (self, "Handling frame");

  if (subframe_mode) {
    gint min_res;

    /* due to limitations in openjpeg library,
     * number of wavelet resolutions must not exceed floor(log(stripe height)) + 1 */
    if (!gst_video_frame_map (&vframe, &self->input_state->info,
            frame->input_buffer, GST_MAP_READ)) {
      gst_video_codec_frame_unref (frame);
      self->last_error = OPENJPEG_ERROR_MAP_READ;
      goto error;
    }
    /* find stripe with least height (first and last stripes may differ
     * from the nominal height) */
    min_res =
        get_stripe_height (self, self->num_stripes - 1,
        GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0));
    min_res = MIN (min_res, get_stripe_height (self, 0,
            GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0)));
    /* take log to find correct number of wavelet resolutions.
     * NOTE(review): this is the natural log(); wavelet resolution counts
     * are conventionally derived from log2 of the dimension — confirm the
     * base used here is intentional. */
    min_res = min_res > 1 ? (gint) log (min_res) + 1 : 1;
    self->params.numresolution = MIN (min_res + 1, self->params.numresolution);
    gst_video_frame_unmap (&vframe);
  }
  if (self->encode_frame (encoder, frame) != GST_FLOW_OK)
    goto error;

  return ret;

error:
  /* Translate the recorded per-stripe error into a bus element error */
  switch (self->last_error) {
    case OPENJPEG_ERROR_INIT:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to initialize OpenJPEG encoder"), (NULL));
      break;
    case OPENJPEG_ERROR_MAP_READ:
      GST_ELEMENT_ERROR (self, CORE, FAILED,
          ("Failed to map input buffer"), (NULL));
      break;
    case OPENJPEG_ERROR_FILL_IMAGE:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to fill OpenJPEG image"), (NULL));
      break;
    case OPENJPEG_ERROR_OPEN:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to open OpenJPEG data"), (NULL));
      break;
    case OPENJPEG_ERROR_ENCODE:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to encode OpenJPEG data"), (NULL));
      break;
    default:
      GST_ELEMENT_ERROR (self, LIBRARY, INIT,
          ("Failed to encode OpenJPEG data"), (NULL));
      break;
  }
  gst_openjpeg_enc_flush_messages (self);
  return GST_FLOW_ERROR;
}
1443 
1444 static gboolean
gst_openjpeg_enc_propose_allocation(GstVideoEncoder * encoder,GstQuery * query)1445 gst_openjpeg_enc_propose_allocation (GstVideoEncoder * encoder,
1446     GstQuery * query)
1447 {
1448   gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
1449 
1450   return GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
1451       query);
1452 }
1453