/*
 * Copyright (C) 2012 Collabora Ltd.
 *   Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
 * Copyright (C) 2013 Sebastian Dröge <slomo@circular-chaos.org>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */
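
/* openjpegenc: JPEG 2000 video encoder built on the OpenJPEG library.
 *
 * Illustrative usage (a minimal sketch, not taken from upstream docs;
 * assumes the plugin is installed and the working directory is writable):
 *
 *   gst-launch-1.0 videotestsrc num-buffers=1 ! openjpegenc ! \
 *       filesink location=frame.j2c
 *
 * encodes a single raw video frame to a JPEG 2000 codestream file.
 */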

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstopenjpegenc.h"
#include <gst/codecparsers/gstjpeg2000sampling.h>

#include <string.h>

GST_DEBUG_CATEGORY_STATIC (gst_openjpeg_enc_debug);
#define GST_CAT_DEFAULT gst_openjpeg_enc_debug

#define GST_OPENJPEG_ENC_TYPE_PROGRESSION_ORDER (gst_openjpeg_enc_progression_order_get_type())
static GType
gst_openjpeg_enc_progression_order_get_type (void)
{
  static const GEnumValue values[] = {
    {OPJ_LRCP, "LRCP", "lrcp"},
    {OPJ_RLCP, "RLCP", "rlcp"},
    {OPJ_RPCL, "RPCL", "rpcl"},
    {OPJ_PCRL, "PCRL", "pcrl"},
    {OPJ_CPRL, "CPRL", "cprl"},
    {0, NULL, NULL}
  };
  static volatile GType id = 0;

  if (g_once_init_enter ((gsize *) & id)) {
    GType _id;

    _id = g_enum_register_static ("GstOpenJPEGEncProgressionOrder", values);

    g_once_init_leave ((gsize *) & id, _id);
  }

  return id;
}

enum
{
  PROP_0,
  PROP_NUM_LAYERS,
  PROP_NUM_RESOLUTIONS,
  PROP_PROGRESSION_ORDER,
  PROP_TILE_OFFSET_X,
  PROP_TILE_OFFSET_Y,
  PROP_TILE_WIDTH,
  PROP_TILE_HEIGHT
};

#define DEFAULT_NUM_LAYERS 1
#define DEFAULT_NUM_RESOLUTIONS 6
#define DEFAULT_PROGRESSION_ORDER OPJ_LRCP
#define DEFAULT_TILE_OFFSET_X 0
#define DEFAULT_TILE_OFFSET_Y 0
#define DEFAULT_TILE_WIDTH 0
#define DEFAULT_TILE_HEIGHT 0

static void gst_openjpeg_enc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_openjpeg_enc_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static gboolean gst_openjpeg_enc_start (GstVideoEncoder * encoder);
static gboolean gst_openjpeg_enc_stop (GstVideoEncoder * encoder);
static gboolean gst_openjpeg_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state);
static GstFlowReturn gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame);
static gboolean gst_openjpeg_enc_propose_allocation (GstVideoEncoder * encoder,
    GstQuery * query);

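/* The sink caps only expose the host-endian variants of the 16-bit gray and
 * 10-bit YUV formats, so the 16-bit copy loops below can treat samples as
 * native guint16 values without byte swapping. */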
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GRAY16 "GRAY16_LE"
#define YUV10 "Y444_10LE, I422_10LE, I420_10LE"
#else
#define GRAY16 "GRAY16_BE"
#define YUV10 "Y444_10BE, I422_10BE, I420_10BE"
#endif

static GstStaticPadTemplate gst_openjpeg_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ ARGB64, ARGB, xRGB, "
            "AYUV64, " YUV10 ", "
            "AYUV, Y444, Y42B, I420, Y41B, YUV9, " "GRAY8, " GRAY16 " }"))
    );

static GstStaticPadTemplate gst_openjpeg_enc_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("image/x-j2c, "
        "width = (int) [1, MAX], "
        "height = (int) [1, MAX], "
        "num-components = (int) [1, 4], "
        GST_JPEG2000_SAMPLING_LIST ","
        GST_JPEG2000_COLORSPACE_LIST "; "
        "image/x-jpc, "
        "width = (int) [1, MAX], "
        "height = (int) [1, MAX], "
        "num-components = (int) [1, 4], "
        GST_JPEG2000_SAMPLING_LIST ","
        GST_JPEG2000_COLORSPACE_LIST "; "
        "image/jp2, " "width = (int) [1, MAX], " "height = (int) [1, MAX]")
    );

#define parent_class gst_openjpeg_enc_parent_class
G_DEFINE_TYPE (GstOpenJPEGEnc, gst_openjpeg_enc, GST_TYPE_VIDEO_ENCODER);

static void
gst_openjpeg_enc_class_init (GstOpenJPEGEncClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoEncoderClass *video_encoder_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  video_encoder_class = (GstVideoEncoderClass *) klass;

  gobject_class->set_property = gst_openjpeg_enc_set_property;
  gobject_class->get_property = gst_openjpeg_enc_get_property;

  g_object_class_install_property (gobject_class, PROP_NUM_LAYERS,
      g_param_spec_int ("num-layers", "Number of layers",
          "Number of layers", 1, 10, DEFAULT_NUM_LAYERS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_NUM_RESOLUTIONS,
      g_param_spec_int ("num-resolutions", "Number of resolutions",
          "Number of resolutions", 1, 10, DEFAULT_NUM_RESOLUTIONS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_PROGRESSION_ORDER,
      g_param_spec_enum ("progression-order", "Progression Order",
          "Progression order", GST_OPENJPEG_ENC_TYPE_PROGRESSION_ORDER,
          DEFAULT_PROGRESSION_ORDER,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_TILE_OFFSET_X,
      g_param_spec_int ("tile-offset-x", "Tile Offset X",
          "Tile Offset X", G_MININT, G_MAXINT, DEFAULT_TILE_OFFSET_X,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_TILE_OFFSET_Y,
      g_param_spec_int ("tile-offset-y", "Tile Offset Y",
          "Tile Offset Y", G_MININT, G_MAXINT, DEFAULT_TILE_OFFSET_Y,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_TILE_WIDTH,
      g_param_spec_int ("tile-width", "Tile Width",
          "Tile Width", 0, G_MAXINT, DEFAULT_TILE_WIDTH,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_TILE_HEIGHT,
      g_param_spec_int ("tile-height", "Tile Height",
          "Tile Height", 0, G_MAXINT, DEFAULT_TILE_HEIGHT,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (element_class,
      &gst_openjpeg_enc_src_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_openjpeg_enc_sink_template);

  gst_element_class_set_static_metadata (element_class,
      "OpenJPEG JPEG2000 encoder",
      "Codec/Encoder/Video",
      "Encode JPEG2000 streams",
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");

  video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_openjpeg_enc_start);
  video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_openjpeg_enc_stop);
  video_encoder_class->set_format =
      GST_DEBUG_FUNCPTR (gst_openjpeg_enc_set_format);
  video_encoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_openjpeg_enc_handle_frame);
  video_encoder_class->propose_allocation = gst_openjpeg_enc_propose_allocation;

  GST_DEBUG_CATEGORY_INIT (gst_openjpeg_enc_debug, "openjpegenc", 0,
      "OpenJPEG Encoder");
}

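/* Instance setup: start from OpenJPEG's default encoder parameters and
 * switch to fixed-quality rate allocation; tiling stays disabled until both
 * tile-width and tile-height are set to non-zero values. */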
static void
gst_openjpeg_enc_init (GstOpenJPEGEnc * self)
{
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_ENCODER_SINK_PAD (self));

  opj_set_default_encoder_parameters (&self->params);

  self->params.cp_fixed_quality = 1;
  self->params.cp_disto_alloc = 0;
  self->params.cp_fixed_alloc = 0;

  /*
   * TODO: Add properties / caps fields for these
   *
   * self->params.csty;
   * self->params.tcp_rates;
   * self->params.tcp_distoratio;
   * self->params.mode;
   * self->params.irreversible;
   * self->params.cp_cinema;
   * self->params.cp_rsiz;
   */

  self->params.tcp_numlayers = DEFAULT_NUM_LAYERS;
  self->params.numresolution = DEFAULT_NUM_RESOLUTIONS;
  self->params.prog_order = DEFAULT_PROGRESSION_ORDER;
  self->params.cp_tx0 = DEFAULT_TILE_OFFSET_X;
  self->params.cp_ty0 = DEFAULT_TILE_OFFSET_Y;
  self->params.cp_tdx = DEFAULT_TILE_WIDTH;
  self->params.cp_tdy = DEFAULT_TILE_HEIGHT;
  self->params.tile_size_on = (self->params.cp_tdx != 0
      && self->params.cp_tdy != 0);
}

static void
gst_openjpeg_enc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (object);

  switch (prop_id) {
    case PROP_NUM_LAYERS:
      self->params.tcp_numlayers = g_value_get_int (value);
      break;
    case PROP_NUM_RESOLUTIONS:
      self->params.numresolution = g_value_get_int (value);
      break;
    case PROP_PROGRESSION_ORDER:
      self->params.prog_order = g_value_get_enum (value);
      break;
    case PROP_TILE_OFFSET_X:
      self->params.cp_tx0 = g_value_get_int (value);
      break;
    case PROP_TILE_OFFSET_Y:
      self->params.cp_ty0 = g_value_get_int (value);
      break;
    case PROP_TILE_WIDTH:
      self->params.cp_tdx = g_value_get_int (value);
      self->params.tile_size_on = (self->params.cp_tdx != 0
          && self->params.cp_tdy != 0);
      break;
    case PROP_TILE_HEIGHT:
      self->params.cp_tdy = g_value_get_int (value);
      self->params.tile_size_on = (self->params.cp_tdx != 0
          && self->params.cp_tdy != 0);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_openjpeg_enc_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (object);

  switch (prop_id) {
    case PROP_NUM_LAYERS:
      g_value_set_int (value, self->params.tcp_numlayers);
      break;
    case PROP_NUM_RESOLUTIONS:
      g_value_set_int (value, self->params.numresolution);
      break;
    case PROP_PROGRESSION_ORDER:
      g_value_set_enum (value, self->params.prog_order);
      break;
    case PROP_TILE_OFFSET_X:
      g_value_set_int (value, self->params.cp_tx0);
      break;
    case PROP_TILE_OFFSET_Y:
      g_value_set_int (value, self->params.cp_ty0);
      break;
    case PROP_TILE_WIDTH:
      g_value_set_int (value, self->params.cp_tdx);
      break;
    case PROP_TILE_HEIGHT:
      g_value_set_int (value, self->params.cp_tdy);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static gboolean
gst_openjpeg_enc_start (GstVideoEncoder * encoder)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Starting");

  return TRUE;
}

static gboolean
gst_openjpeg_enc_stop (GstVideoEncoder * video_encoder)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (video_encoder);

  GST_DEBUG_OBJECT (self, "Stopping");

  if (self->output_state) {
    gst_video_codec_state_unref (self->output_state);
    self->output_state = NULL;
  }

  if (self->input_state) {
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  GST_DEBUG_OBJECT (self, "Stopped");

  return TRUE;
}

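/* fill_image_*: copy one mapped GstVideoFrame into the pre-allocated
 * opj_image_t, widening each sample into the encoder's integer component
 * buffers. The packed 4-component variants re-order the data so that the
 * alpha channel (stored first in ARGB64/ARGB/AYUV memory layout) ends up as
 * the last JPEG 2000 component; the planar variants are plain per-plane
 * copies that honour the source stride. */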
static void
fill_image_packed16_4 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint16 *data_in, *tmp;
  gint *data_out[4];
  gint sstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_in = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;

  data_out[0] = image->comps[0].data;
  data_out[1] = image->comps[1].data;
  data_out[2] = image->comps[2].data;
  data_out[3] = image->comps[3].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;

    for (x = 0; x < w; x++) {
      *data_out[3] = tmp[0];
      *data_out[0] = tmp[1];
      *data_out[1] = tmp[2];
      *data_out[2] = tmp[3];

      tmp += 4;
      data_out[0]++;
      data_out[1]++;
      data_out[2]++;
      data_out[3]++;
    }
    data_in += sstride;
  }
}

static void
fill_image_packed8_4 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out[4];
  gint sstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_in = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  data_out[0] = image->comps[0].data;
  data_out[1] = image->comps[1].data;
  data_out[2] = image->comps[2].data;
  data_out[3] = image->comps[3].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;

    for (x = 0; x < w; x++) {
      *data_out[3] = tmp[0];
      *data_out[0] = tmp[1];
      *data_out[1] = tmp[2];
      *data_out[2] = tmp[3];

      tmp += 4;
      data_out[0]++;
      data_out[1]++;
      data_out[2]++;
      data_out[3]++;
    }
    data_in += sstride;
  }
}

static void
fill_image_packed8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out[3];
  gint sstride;

  w = GST_VIDEO_FRAME_WIDTH (frame);
  h = GST_VIDEO_FRAME_HEIGHT (frame);
  data_in = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);

  data_out[0] = image->comps[0].data;
  data_out[1] = image->comps[1].data;
  data_out[2] = image->comps[2].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;

    for (x = 0; x < w; x++) {
      *data_out[0] = tmp[1];
      *data_out[1] = tmp[2];
      *data_out[2] = tmp[3];

      tmp += 4;
      data_out[0]++;
      data_out[1]++;
      data_out[2]++;
    }
    data_in += sstride;
  }
}

static void
fill_image_planar16_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint c, x, y, w, h;
  const guint16 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  for (c = 0; c < 3; c++) {
    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
    data_in = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, c);
    sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c) / 2;
    data_out = image->comps[c].data;

    for (y = 0; y < h; y++) {
      tmp = data_in;
      for (x = 0; x < w; x++) {
        *data_out = *tmp;
        data_out++;
        tmp++;
      }
      data_in += sstride;
    }
  }
}

static void
fill_image_planar8_3 (opj_image_t * image, GstVideoFrame * frame)
{
  gint c, x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  for (c = 0; c < 3; c++) {
    w = GST_VIDEO_FRAME_COMP_WIDTH (frame, c);
    h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, c);
    data_in = GST_VIDEO_FRAME_COMP_DATA (frame, c);
    sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, c);
    data_out = image->comps[c].data;

    for (y = 0; y < h; y++) {
      tmp = data_in;
      for (x = 0; x < w; x++) {
        *data_out = *tmp;
        data_out++;
        tmp++;
      }
      data_in += sstride;
    }
  }
}

static void
fill_image_planar8_1 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint8 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
  data_in = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
  data_out = image->comps[0].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;
    for (x = 0; x < w; x++) {
      *data_out = *tmp;
      data_out++;
      tmp++;
    }
    data_in += sstride;
  }
}

static void
fill_image_planar16_1 (opj_image_t * image, GstVideoFrame * frame)
{
  gint x, y, w, h;
  const guint16 *data_in, *tmp;
  gint *data_out;
  gint sstride;

  w = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
  h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
  data_in = (guint16 *) GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) / 2;
  data_out = image->comps[0].data;

  for (y = 0; y < h; y++) {
    tmp = data_in;
    for (x = 0; x < w; x++) {
      *data_out = *tmp;
      data_out++;
      tmp++;
    }
    data_in += sstride;
  }
}

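/* set_format: pick the output codec format (JP2 file, raw codestream, or
 * jp2c-framed codestream) from the first structure of the downstream caps,
 * select the matching fill_image_* converter for the negotiated raw format,
 * derive the JPEG 2000 sampling and colorspace caps fields, and set the
 * output state accordingly. */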
static gboolean
gst_openjpeg_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstCaps *allowed_caps, *caps;
  GstStructure *s;
  const gchar *colorspace = NULL;
  GstJPEG2000Sampling sampling = GST_JPEG2000_SAMPLING_NONE;
  gint ncomps;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  allowed_caps = gst_caps_truncate (allowed_caps);
  s = gst_caps_get_structure (allowed_caps, 0);
  if (gst_structure_has_name (s, "image/jp2")) {
    self->codec_format = OPJ_CODEC_JP2;
    self->is_jp2c = FALSE;
  } else if (gst_structure_has_name (s, "image/x-j2c")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = TRUE;
  } else if (gst_structure_has_name (s, "image/x-jpc")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = FALSE;
  } else {
    g_return_val_if_reached (FALSE);
  }

  switch (state->info.finfo->format) {
    case GST_VIDEO_FORMAT_ARGB64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_AYUV:
      self->fill_image = fill_image_packed8_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_xRGB:
      self->fill_image = fill_image_packed8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_AYUV64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
      self->fill_image = fill_image_planar16_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_YUV9:
      self->fill_image = fill_image_planar8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      self->fill_image = fill_image_planar8_1;
      ncomps = 1;
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      self->fill_image = fill_image_planar16_1;
      ncomps = 1;
      break;
    default:
      g_assert_not_reached ();
  }

  /* sampling */
  /* note: encoder re-orders channels so that the alpha channel is encoded as
   * the last channel */
  switch (state->info.finfo->format) {
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_ARGB:
      sampling = GST_JPEG2000_SAMPLING_RGBA;
      break;
    case GST_VIDEO_FORMAT_AYUV64:
    case GST_VIDEO_FORMAT_AYUV:
      sampling = GST_JPEG2000_SAMPLING_YBRA4444_EXT;
      break;
    case GST_VIDEO_FORMAT_xRGB:
      sampling = GST_JPEG2000_SAMPLING_RGB;
      break;
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_Y444:
      sampling = GST_JPEG2000_SAMPLING_YBR444;
      break;
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_Y42B:
      sampling = GST_JPEG2000_SAMPLING_YBR422;
      break;
    case GST_VIDEO_FORMAT_YUV9:
      sampling = GST_JPEG2000_SAMPLING_YBR410;
      break;
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
    case GST_VIDEO_FORMAT_I420:
      sampling = GST_JPEG2000_SAMPLING_YBR420;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      sampling = GST_JPEG2000_SAMPLING_GRAYSCALE;
      break;
    default:
      break;
  }

  if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV)) {
    colorspace = "sYUV";
  } else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB)) {
    colorspace = "sRGB";
  } else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY)) {
    colorspace = "GRAY";
  } else
    g_return_val_if_reached (FALSE);

  if (sampling != GST_JPEG2000_SAMPLING_NONE) {
    caps = gst_caps_new_simple (gst_structure_get_name (s),
        "colorspace", G_TYPE_STRING, colorspace,
        "sampling", G_TYPE_STRING, gst_jpeg2000_sampling_to_string (sampling),
        "num-components", G_TYPE_INT, ncomps, NULL);
  } else {
    caps = gst_caps_new_simple (gst_structure_get_name (s),
        "colorspace", G_TYPE_STRING, colorspace,
        "num-components", G_TYPE_INT, ncomps, NULL);
  }
  gst_caps_unref (allowed_caps);

  if (self->output_state)
    gst_video_codec_state_unref (self->output_state);
  self->output_state =
      gst_video_encoder_set_output_state (encoder, caps, state);

  gst_video_encoder_negotiate (GST_VIDEO_ENCODER (encoder));

  return TRUE;
}

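/* Allocate an opj_image_t matching the mapped video frame: one JPEG 2000
 * component per video component, with dx/dy carrying the chroma subsampling
 * factors, then let the format-specific fill_image callback copy the pixel
 * data into it. */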
static opj_image_t *
gst_openjpeg_enc_fill_image (GstOpenJPEGEnc * self, GstVideoFrame * frame)
{
  gint i, ncomps;
  opj_image_cmptparm_t *comps;
  OPJ_COLOR_SPACE colorspace;
  opj_image_t *image;

  ncomps = GST_VIDEO_FRAME_N_COMPONENTS (frame);
  comps = g_new0 (opj_image_cmptparm_t, ncomps);

  for (i = 0; i < ncomps; i++) {
    comps[i].prec = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].bpp = GST_VIDEO_FRAME_COMP_DEPTH (frame, i);
    comps[i].sgnd = 0;
    comps[i].w = GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].h = GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
    comps[i].dx =
        GST_VIDEO_FRAME_WIDTH (frame) / GST_VIDEO_FRAME_COMP_WIDTH (frame, i);
    comps[i].dy =
        GST_VIDEO_FRAME_HEIGHT (frame) / GST_VIDEO_FRAME_COMP_HEIGHT (frame, i);
  }

  if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV))
    colorspace = OPJ_CLRSPC_SYCC;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB))
    colorspace = OPJ_CLRSPC_SRGB;
  else if ((frame->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY))
    colorspace = OPJ_CLRSPC_GRAY;
  else
    g_return_val_if_reached (NULL);

  image = opj_image_create (ncomps, comps, colorspace);
  g_free (comps);

  image->x0 = image->y0 = 0;
  image->x1 = GST_VIDEO_FRAME_WIDTH (frame);
  image->y1 = GST_VIDEO_FRAME_HEIGHT (frame);

  self->fill_image (image, frame);

  return image;
}

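/* OpenJPEG message callbacks: forward library diagnostics to the element's
 * debug category at TRACE level, trimming the trailing newline that the
 * library appends to its messages. */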
static void
gst_openjpeg_enc_opj_error (const char *msg, void *userdata)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (userdata);
  gchar *trimmed = g_strchomp (g_strdup (msg));
  GST_TRACE_OBJECT (self, "openjpeg error: %s", trimmed);
  g_free (trimmed);
}

static void
gst_openjpeg_enc_opj_warning (const char *msg, void *userdata)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (userdata);
  gchar *trimmed = g_strchomp (g_strdup (msg));
  GST_TRACE_OBJECT (self, "openjpeg warning: %s", trimmed);
  g_free (trimmed);
}

static void
gst_openjpeg_enc_opj_info (const char *msg, void *userdata)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (userdata);
  gchar *trimmed = g_strchomp (g_strdup (msg));
  GST_TRACE_OBJECT (self, "openjpeg info: %s", trimmed);
  g_free (trimmed);
}

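/* With OpenJPEG 2.x the encoder writes through an opj_stream_t, so the
 * callbacks below implement a simple growable in-memory output stream:
 * write_fn and skip_fn double the allocation as needed, seek_fn only
 * repositions within data already written, and read_fn is never expected to
 * be called while encoding. */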
#ifndef HAVE_OPENJPEG_1
typedef struct
{
  guint8 *data;
  guint allocsize;
  guint offset;
  guint size;
} MemStream;

static OPJ_SIZE_T
read_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
{
  g_return_val_if_reached (-1);
}

static OPJ_SIZE_T
write_fn (void *p_buffer, OPJ_SIZE_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;

  if (mstream->offset + p_nb_bytes > mstream->allocsize) {
    while (mstream->offset + p_nb_bytes > mstream->allocsize)
      mstream->allocsize *= 2;
    mstream->data = g_realloc (mstream->data, mstream->allocsize);
  }

  memcpy (mstream->data + mstream->offset, p_buffer, p_nb_bytes);

  if (mstream->offset + p_nb_bytes > mstream->size)
    mstream->size = mstream->offset + p_nb_bytes;
  mstream->offset += p_nb_bytes;

  return p_nb_bytes;
}

static OPJ_OFF_T
skip_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;

  if (mstream->offset + p_nb_bytes > mstream->allocsize) {
    while (mstream->offset + p_nb_bytes > mstream->allocsize)
      mstream->allocsize *= 2;
    mstream->data = g_realloc (mstream->data, mstream->allocsize);
  }

  if (mstream->offset + p_nb_bytes > mstream->size)
    mstream->size = mstream->offset + p_nb_bytes;

  mstream->offset += p_nb_bytes;

  return p_nb_bytes;
}

static OPJ_BOOL
seek_fn (OPJ_OFF_T p_nb_bytes, void *p_user_data)
{
  MemStream *mstream = p_user_data;

  if (p_nb_bytes > mstream->size)
    return OPJ_FALSE;

  mstream->offset = p_nb_bytes;

  return OPJ_TRUE;
}
#endif

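/* handle_frame: convert the raw input frame into an opj_image_t, run the
 * OpenJPEG compressor (via opj_cio with the 1.x API or the in-memory stream
 * callbacks above with 2.x), optionally prepend an 8-byte jp2c box header
 * for image/x-j2c output, and push the result downstream as a sync point. */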
static GstFlowReturn
gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
#ifdef HAVE_OPENJPEG_1
  opj_cinfo_t *enc;
  GstMapInfo map;
  guint length;
  opj_cio_t *io;
#else
  opj_codec_t *enc;
  opj_stream_t *stream;
  MemStream mstream;
#endif
  opj_image_t *image;
  GstVideoFrame vframe;

  GST_DEBUG_OBJECT (self, "Handling frame");

  enc = opj_create_compress (self->codec_format);
  if (!enc)
    goto initialization_error;

#ifdef HAVE_OPENJPEG_1
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_event_mgr_t callbacks;

    callbacks.error_handler = gst_openjpeg_enc_opj_error;
    callbacks.warning_handler = gst_openjpeg_enc_opj_warning;
    callbacks.info_handler = gst_openjpeg_enc_opj_info;
    opj_set_event_mgr ((opj_common_ptr) enc, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) enc, NULL, NULL);
  }
#else
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (enc, gst_openjpeg_enc_opj_info, self);
    opj_set_warning_handler (enc, gst_openjpeg_enc_opj_warning, self);
    opj_set_error_handler (enc, gst_openjpeg_enc_opj_error, self);
  } else {
    opj_set_info_handler (enc, NULL, NULL);
    opj_set_warning_handler (enc, NULL, NULL);
    opj_set_error_handler (enc, NULL, NULL);
  }
#endif

  if (!gst_video_frame_map (&vframe, &self->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    goto map_read_error;

  image = gst_openjpeg_enc_fill_image (self, &vframe);
  if (!image)
    goto fill_image_error;
  gst_video_frame_unmap (&vframe);

  if (vframe.info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB) {
    self->params.tcp_mct = 1;
  }
  opj_setup_encoder (enc, &self->params, image);

#ifdef HAVE_OPENJPEG_1
  io = opj_cio_open ((opj_common_ptr) enc, NULL, 0);
  if (!io)
    goto open_error;

  if (!opj_encode (enc, io, image, NULL))
    goto encode_error;

  opj_image_destroy (image);

  length = cio_tell (io);

  ret =
      gst_video_encoder_allocate_output_frame (encoder, frame,
      length + (self->is_jp2c ? 8 : 0));
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  gst_buffer_fill (frame->output_buffer, self->is_jp2c ? 8 : 0, io->buffer,
      length);
  if (self->is_jp2c) {
    gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, length + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_buffer_unmap (frame->output_buffer, &map);
  }

  opj_cio_close (io);
  opj_destroy_compress (enc);
#else
  stream = opj_stream_create (4096, OPJ_FALSE);
  if (!stream)
    goto open_error;

  mstream.allocsize = 4096;
  mstream.data = g_malloc (mstream.allocsize);
  mstream.offset = 0;
  mstream.size = 0;

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream, NULL);
  opj_stream_set_user_data_length (stream, mstream.size);

  if (!opj_start_compress (enc, image, stream))
    goto encode_error;

  if (!opj_encode (enc, stream))
    goto encode_error;

  if (!opj_end_compress (enc, stream))
    goto encode_error;

  opj_image_destroy (image);
  opj_stream_destroy (stream);
  opj_destroy_codec (enc);

  frame->output_buffer = gst_buffer_new ();

  if (self->is_jp2c) {
    GstMapInfo map;
    GstMemory *mem;

    mem = gst_allocator_alloc (NULL, 8, NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, mstream.size + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (frame->output_buffer, mem);
  }

  gst_buffer_append_memory (frame->output_buffer,
      gst_memory_new_wrapped (0, mstream.data, mstream.allocsize, 0,
          mstream.size, NULL, (GDestroyNotify) g_free));
#endif

  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  ret = gst_video_encoder_finish_frame (encoder, frame);

  return ret;

initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG encoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map input buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
fill_image_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_frame_unmap (&vframe);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to fill OpenJPEG image"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to open OpenJPEG data"), (NULL));
    return GST_FLOW_ERROR;
  }
encode_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_image_destroy (image);
    opj_destroy_compress (enc);
#else
    opj_stream_destroy (stream);
    g_free (mstream.data);
    opj_image_destroy (image);
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, STREAM, ENCODE,
        ("Failed to encode OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
#ifdef HAVE_OPENJPEG_1
allocate_error:
  {
    opj_cio_close (io);
    opj_destroy_compress (enc);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
#endif
}

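/* Advertise GstVideoMeta support so upstream may allocate buffers with
 * non-default strides and offsets, then chain up to the parent class. */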
static gboolean
gst_openjpeg_enc_propose_allocation (GstVideoEncoder * encoder,
    GstQuery * query)
{
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  return GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
      query);
}