/*
 * Copyright (c) 2014, Ericsson AB. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or other
 * materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
 * OF SUCH DAMAGE.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstopenh264dec.h"

#include <wels/codec_ver.h>
#define OPENH264_VERSION_CHECK(maj,min) ((OPENH264_MAJOR > (maj)) || (OPENH264_MAJOR == (maj) && OPENH264_MINOR >= (min)))

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideodecoder.h>
#include <string.h>             /* for memcpy */

#if OPENH264_VERSION_CHECK (1,9)
#define HAVE_OPENH264_MAIN_PROFILE 1
#else
#define HAVE_OPENH264_MAIN_PROFILE 0
#endif

GST_DEBUG_CATEGORY_STATIC (gst_openh264dec_debug_category);
#define GST_CAT_DEFAULT gst_openh264dec_debug_category

/* prototypes */
static gboolean gst_openh264dec_start (GstVideoDecoder * decoder);
static gboolean gst_openh264dec_stop (GstVideoDecoder * decoder);

static gboolean gst_openh264dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state);
static gboolean gst_openh264dec_reset (GstVideoDecoder * decoder,
    gboolean hard);
static GstFlowReturn gst_openh264dec_finish (GstVideoDecoder * decoder);
static GstFlowReturn gst_openh264dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame);
static gboolean gst_openh264dec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query);

#if HAVE_OPENH264_MAIN_PROFILE
#define SUPPORTED_PROFILE_STR "profile=(string){ constrained-baseline, baseline, main, high }"
#else
#define SUPPORTED_PROFILE_STR "profile=(string){ constrained-baseline, baseline }"
#endif

/* pad templates */
static GstStaticPadTemplate gst_openh264dec_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS
    ("video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, "
        SUPPORTED_PROFILE_STR));

static GstStaticPadTemplate gst_openh264dec_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420")));

/* class initialization */

G_DEFINE_TYPE_WITH_CODE (GstOpenh264Dec, gst_openh264dec,
    GST_TYPE_VIDEO_DECODER,
    GST_DEBUG_CATEGORY_INIT (gst_openh264dec_debug_category, "openh264dec", 0,
        "debug category for openh264dec element"));

static void
gst_openh264dec_class_init (GstOpenh264DecClass * klass)
{
  GstVideoDecoderClass *video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);

  gst_element_class_add_static_pad_template (GST_ELEMENT_CLASS (klass),
      &gst_openh264dec_sink_template);
  gst_element_class_add_static_pad_template (GST_ELEMENT_CLASS (klass),
      &gst_openh264dec_src_template);

  gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
      "OpenH264 video decoder", "Decoder/Video", "OpenH264 video decoder",
      "Ericsson AB, http://www.ericsson.com");

  video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_openh264dec_start);
  video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_openh264dec_stop);

  video_decoder_class->set_format =
      GST_DEBUG_FUNCPTR (gst_openh264dec_set_format);
  video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_openh264dec_reset);
  video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_openh264dec_finish);
  video_decoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_openh264dec_handle_frame);
  video_decoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_openh264dec_decide_allocation);
}

static void
gst_openh264dec_init (GstOpenh264Dec * openh264dec)
{
  openh264dec->decoder = NULL;

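  /* The sink caps require stream-format=byte-stream and alignment=au, so each
   * input buffer carries one complete access unit; also require caps to be
   * set before the first buffer is handled */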
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (openh264dec), TRUE);
  gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (openh264dec), TRUE);
}

#ifndef GST_DISABLE_GST_DEBUG
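/* Forward OpenH264's trace output to the GStreamer log, mapping the
 * WELS_LOG_* levels onto the closest GstDebugLevel */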
static void
openh264_trace_cb (void *ctx, int level, const char *string)
{
  GObject *o = G_OBJECT (ctx);
  GstDebugLevel lvl = GST_LEVEL_WARNING;

  if (level >= WELS_LOG_DETAIL)
    lvl = GST_LEVEL_LOG;
  else if (level >= WELS_LOG_DEBUG)
    lvl = GST_LEVEL_DEBUG;
  else if (level >= WELS_LOG_INFO)
    lvl = GST_LEVEL_INFO;
  else if (level >= WELS_LOG_WARNING)
    lvl = GST_LEVEL_WARNING;
  else if (level >= WELS_LOG_ERROR)
    lvl = GST_LEVEL_ERROR;

  gst_debug_log (GST_CAT_DEFAULT, lvl, "", "", 0, o, "%s", string);
}
#endif

static gboolean
gst_openh264dec_start (GstVideoDecoder * decoder)
{
  GstOpenh264Dec *openh264dec = GST_OPENH264DEC (decoder);
  gint ret;
  SDecodingParam dec_param = { 0 };

  if (openh264dec->decoder != NULL) {
    openh264dec->decoder->Uninitialize ();
    WelsDestroyDecoder (openh264dec->decoder);
    openh264dec->decoder = NULL;
  }
  WelsCreateDecoder (&(openh264dec->decoder));

#ifndef GST_DISABLE_GST_DEBUG
  {
    int log_level = WELS_LOG_WARNING;
    WelsTraceCallback log_cb = openh264_trace_cb;

    openh264dec->decoder->SetOption (DECODER_OPTION_TRACE_LEVEL, &log_level);
    openh264dec->decoder->SetOption (DECODER_OPTION_TRACE_CALLBACK,
        (void *) &log_cb);
    openh264dec->decoder->SetOption (DECODER_OPTION_TRACE_CALLBACK_CONTEXT,
        (void *) &decoder);
  }
#endif

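  /* Target the highest DQ (dependency/quality) layer id, conceal decode
   * errors by copying the previous frame, and expect a plain AVC bitstream */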
  dec_param.uiTargetDqLayer = 255;
  dec_param.eEcActiveIdc = ERROR_CON_FRAME_COPY;
#if OPENH264_MAJOR == 1 && OPENH264_MINOR < 6
  dec_param.eOutputColorFormat = videoFormatI420;
#endif
  dec_param.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_AVC;

  ret = openh264dec->decoder->Initialize (&dec_param);

  GST_DEBUG_OBJECT (openh264dec,
      "openh264_dec_start called, openh264dec %sinitialized OK!",
      (ret != cmResultSuccess) ? "NOT " : "");

  return (ret == cmResultSuccess);
}

static gboolean
gst_openh264dec_stop (GstVideoDecoder * decoder)
{
  GstOpenh264Dec *openh264dec = GST_OPENH264DEC (decoder);

  if (openh264dec->decoder) {
    openh264dec->decoder->Uninitialize ();
    WelsDestroyDecoder (openh264dec->decoder);
    openh264dec->decoder = NULL;
  }

  if (openh264dec->input_state) {
    gst_video_codec_state_unref (openh264dec->input_state);
    openh264dec->input_state = NULL;
  }
  openh264dec->width = openh264dec->height = 0;

  return TRUE;
}

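/* Only the input state is stored here; the output state is configured in
 * handle_frame once the first decoded picture reveals the actual dimensions */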
static gboolean
gst_openh264dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstOpenh264Dec *openh264dec = GST_OPENH264DEC (decoder);

  GST_DEBUG_OBJECT (openh264dec, "input caps: %" GST_PTR_FORMAT, state->caps);

  if (openh264dec->input_state) {
    gst_video_codec_state_unref (openh264dec->input_state);
    openh264dec->input_state = NULL;
  }
  openh264dec->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}

static gboolean
gst_openh264dec_reset (GstVideoDecoder * decoder, gboolean hard)
{
  GstOpenh264Dec *openh264dec = GST_OPENH264DEC (decoder);

  GST_DEBUG_OBJECT (openh264dec, "reset");

  return TRUE;
}

static GstFlowReturn
gst_openh264dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstOpenh264Dec *openh264dec = GST_OPENH264DEC (decoder);
  GstMapInfo map_info;
  GstVideoCodecState *state;
  SBufferInfo dst_buf_info;
  DECODING_STATE ret;
  guint8 *yuvdata[3];
  GstFlowReturn flow_status;
  GstVideoFrame video_frame;
  guint actual_width, actual_height;
  guint i;
  guint8 *p;
  guint row_stride, component_width, component_height, src_width, row;

  if (frame == NULL) {
#if OPENH264_VERSION_CHECK (1,9)
    /* Called with no videoframe for EOS logic. Drain out */
    int end_of_stream = 1;
    memset (&dst_buf_info, 0, sizeof (SBufferInfo));

    openh264dec->decoder->SetOption (DECODER_OPTION_END_OF_STREAM,
        &end_of_stream);
    ret = openh264dec->decoder->FlushFrame (yuvdata, &dst_buf_info);

    if (ret != dsErrorFree || dst_buf_info.iBufferStatus != 1) {
      GST_DEBUG_OBJECT (decoder, "No more frames to retrieve at EOS");
      return GST_FLOW_EOS;
    }
#else
    return GST_FLOW_EOS;
#endif
  } else {
    if (!gst_buffer_map (frame->input_buffer, &map_info, GST_MAP_READ)) {
      GST_ERROR_OBJECT (openh264dec, "Cannot map input buffer!");
      gst_video_codec_frame_unref (frame);
      return GST_FLOW_ERROR;
    }

    GST_LOG_OBJECT (openh264dec, "handle frame, 1st NAL type %d",
        map_info.size > 4 ? map_info.data[4] & 0x1f : -1);

    memset (&dst_buf_info, 0, sizeof (SBufferInfo));
    /* Use the unsigned long long OpenH264 timestamp to store the system_frame_number
     * to track the original frame through any OpenH264 reordering */
    dst_buf_info.uiInBsTimeStamp = frame->system_frame_number;

    GST_LOG_OBJECT (decoder, "Submitting frame with PTS %" GST_TIME_FORMAT
        " and frame ref %" G_GUINT64_FORMAT,
        GST_TIME_ARGS (frame->pts), (guint64) frame->system_frame_number);

    ret =
        openh264dec->decoder->DecodeFrameNoDelay (map_info.data, map_info.size,
        yuvdata, &dst_buf_info);
    gst_buffer_unmap (frame->input_buffer, &map_info);

    if (ret != dsErrorFree) {
      /* Request a key unit from upstream */
      GST_DEBUG_OBJECT (openh264dec, "Requesting a key unit");
      gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decoder),
          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
              FALSE, 0));

      GST_LOG_OBJECT (openh264dec, "error decoding nal, return code: %d", ret);
      gst_video_codec_frame_unref (frame);

      /* Get back the frame that was reported as errored */
      frame =
          gst_video_decoder_get_frame (decoder, dst_buf_info.uiOutYuvTimeStamp);
      if (frame) {
        GST_LOG_OBJECT (decoder,
            "Dropping errored frame ref %" G_GUINT64_FORMAT,
            (guint64) dst_buf_info.uiOutYuvTimeStamp);
        return gst_video_decoder_drop_frame (decoder, frame);
      }
      return GST_FLOW_OK;
    }

    gst_video_codec_frame_unref (frame);
    frame = NULL;

    /* No output available yet */
    if (dst_buf_info.iBufferStatus != 1) {
      GST_LOG_OBJECT (decoder, "No buffer decoded yet");
      return GST_FLOW_OK;
    }
  }

  GST_LOG_OBJECT (decoder, "Got back frame with frame ref %" G_GUINT64_FORMAT,
      (guint64) dst_buf_info.uiOutYuvTimeStamp);

  /* OpenH264 lets us pass an int reference through
   * so we can retrieve the input frame now */
  frame = gst_video_decoder_get_frame (decoder, dst_buf_info.uiOutYuvTimeStamp);
  if (!frame) {
    /* Where did our frame go? This is a reference tracking error. */
    GST_WARNING_OBJECT (decoder,
        "Failed to look up frame ref %" G_GUINT64_FORMAT,
        (guint64) dst_buf_info.uiOutYuvTimeStamp);
    return GST_FLOW_OK;
  }

  actual_width = dst_buf_info.UsrData.sSystemBuffer.iWidth;
  actual_height = dst_buf_info.UsrData.sSystemBuffer.iHeight;

  if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (openh264dec))
      || actual_width != openh264dec->width
      || actual_height != openh264dec->height) {
    state =
        gst_video_decoder_set_output_state (decoder, GST_VIDEO_FORMAT_I420,
        actual_width, actual_height, openh264dec->input_state);
    openh264dec->width = actual_width;
    openh264dec->height = actual_height;

    if (!gst_video_decoder_negotiate (decoder)) {
      GST_ERROR_OBJECT (openh264dec,
          "Failed to negotiate with downstream elements");
      gst_video_codec_state_unref (state);
      gst_video_codec_frame_unref (frame);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  } else {
    state = gst_video_decoder_get_output_state (decoder);
  }

  flow_status = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (flow_status != GST_FLOW_OK) {
    gst_video_codec_state_unref (state);
    gst_video_codec_frame_unref (frame);
    return flow_status;
  }

  if (!gst_video_frame_map (&video_frame, &state->info, frame->output_buffer,
          GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (openh264dec, "Cannot map output buffer!");
    gst_video_codec_state_unref (state);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }

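  /* Copy the decoded I420 planes row by row: OpenH264's strides (iStride[0]
   * for Y, iStride[1] for U and V) need not match the strides of the
   * negotiated output frame */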
  for (i = 0; i < 3; i++) {
    p = GST_VIDEO_FRAME_COMP_DATA (&video_frame, i);
    row_stride = GST_VIDEO_FRAME_COMP_STRIDE (&video_frame, i);
    component_width = GST_VIDEO_FRAME_COMP_WIDTH (&video_frame, i);
    component_height = GST_VIDEO_FRAME_COMP_HEIGHT (&video_frame, i);
    src_width = (i == 0) ? dst_buf_info.UsrData.sSystemBuffer.iStride[0]
        : dst_buf_info.UsrData.sSystemBuffer.iStride[1];
    for (row = 0; row < component_height; row++) {
      memcpy (p, yuvdata[i], component_width);
      p += row_stride;
      yuvdata[i] += src_width;
    }
  }
  gst_video_codec_state_unref (state);
  gst_video_frame_unmap (&video_frame);

  return gst_video_decoder_finish_frame (decoder, frame);
}

static GstFlowReturn
gst_openh264dec_finish (GstVideoDecoder * decoder)
{
  GstOpenh264Dec *openh264dec = GST_OPENH264DEC (decoder);

  GST_DEBUG_OBJECT (openh264dec, "finish");

  /* Decoder not negotiated yet */
  if (openh264dec->width == 0)
    return GST_FLOW_OK;

  /* Drain all pending frames */
  while ((gst_openh264dec_handle_frame (decoder, NULL)) == GST_FLOW_OK);

  return GST_FLOW_OK;
}

static gboolean
gst_openh264dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstVideoCodecState *state;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;

  if (!GST_VIDEO_DECODER_CLASS (gst_openh264dec_parent_class)->decide_allocation
      (decoder, query))
    return FALSE;

  state = gst_video_decoder_get_output_state (decoder);

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

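  /* If downstream supports GstVideoMeta, enable it on the pool so buffers
   * with non-default strides and offsets can be described */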
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_object_unref (pool);
  gst_video_codec_state_unref (state);

  return TRUE;
}