/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */


#include <stdlib.h>
#include <string.h>
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"
#include "vpx/internal/vpx_codec_internal.h"
#include "vpx_version.h"
#include "common/onyxd.h"
#include "decoder/onyxd_int.h"

#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)

typedef vpx_codec_stream_info_t vp8_stream_info_t;

/* Structures for handling memory allocations */
typedef enum
{
    VP8_SEG_ALG_PRIV = 256,
    VP8_SEG_MAX
} mem_seg_id_t;
#define NELEMENTS(x) ((int)(sizeof(x)/sizeof(x[0])))

static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t);

typedef struct
{
    unsigned int   id;
    unsigned long  sz;
    unsigned int   align;
    unsigned int   flags;
    unsigned long(*calc_sz)(const vpx_codec_dec_cfg_t *, vpx_codec_flags_t);
} mem_req_t;

static const mem_req_t vp8_mem_req_segs[] =
{
    {VP8_SEG_ALG_PRIV, 0, 8, VPX_CODEC_MEM_ZERO, vp8_priv_sz},
    {VP8_SEG_MAX, 0, 0, 0, NULL}
};

struct vpx_codec_alg_priv
{
    vpx_codec_priv_t    base;
    vpx_codec_mmap_t    mmaps[NELEMENTS(vp8_mem_req_segs)-1];
    vpx_codec_dec_cfg_t cfg;
    vp8_stream_info_t   si;
    int                 defer_alloc;
    int                 decoder_init;
    VP8D_PTR            pbi;
    int                 postproc_cfg_set;
    vp8_postproc_cfg_t  postproc_cfg;
#if CONFIG_POSTPROC_VISUALIZER
    unsigned int        dbg_postproc_flag;
    int                 dbg_color_ref_frame_flag;
    int                 dbg_color_mb_modes_flag;
    int                 dbg_color_b_modes_flag;
    int                 dbg_display_mv_flag;
#endif
    vpx_image_t         img;
    int                 img_setup;
    int                 img_avail;
};

static unsigned long vp8_priv_sz(const vpx_codec_dec_cfg_t *si, vpx_codec_flags_t flags)
{
    /* Although this declaration is constant, we can't use it in the requested
     * segments list because we want to define the requested segments list
     * before defining the private type (so that the number of memory maps is
     * known)
     */
    (void)si;
    return sizeof(vpx_codec_alg_priv_t);
}


static void vp8_mmap_dtor(vpx_codec_mmap_t *mmap)
{
    free(mmap->priv);
}

static vpx_codec_err_t vp8_mmap_alloc(vpx_codec_mmap_t *mmap)
{
    vpx_codec_err_t res;
    unsigned int    align;

    align = mmap->align ? mmap->align - 1 : 0;

    if (mmap->flags & VPX_CODEC_MEM_ZERO)
        mmap->priv = calloc(1, mmap->sz + align);
    else
        mmap->priv = malloc(mmap->sz + align);

    res = (mmap->priv) ? VPX_CODEC_OK : VPX_CODEC_MEM_ERROR;
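    /* Round the start of the usable region up to the requested alignment.
     * The extra `align` bytes allocated above leave room for this adjustment
     * (mmap->align is expected to be a power of two).
     */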
    mmap->base = (void *)((((uintptr_t)mmap->priv) + align) & ~(uintptr_t)align);
    mmap->dtor = vp8_mmap_dtor;
    return res;
}

static vpx_codec_err_t vp8_validate_mmaps(const vp8_stream_info_t *si,
                                          const vpx_codec_mmap_t  *mmaps,
                                          vpx_codec_flags_t        init_flags)
{
    int i;
    vpx_codec_err_t res = VPX_CODEC_OK;

    for (i = 0; i < NELEMENTS(vp8_mem_req_segs) - 1; i++)
    {
        /* Ensure the segment has been allocated */
        if (!mmaps[i].base)
        {
            res = VPX_CODEC_MEM_ERROR;
            break;
        }

        /* Verify variable size segment is big enough for the current si. */
        if (vp8_mem_req_segs[i].calc_sz)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = si->w;
            cfg.h = si->h;

            if (mmaps[i].sz < vp8_mem_req_segs[i].calc_sz(&cfg, init_flags))
            {
                res = VPX_CODEC_MEM_ERROR;
                break;
            }
        }
    }

    return res;
}

static void vp8_init_ctx(vpx_codec_ctx_t *ctx, const vpx_codec_mmap_t *mmap)
{
    int i;

    ctx->priv = mmap->base;
    ctx->priv->sz = sizeof(*ctx->priv);
    ctx->priv->iface = ctx->iface;
    ctx->priv->alg_priv = mmap->base;

    for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        ctx->priv->alg_priv->mmaps[i].id = vp8_mem_req_segs[i].id;

    ctx->priv->alg_priv->mmaps[0] = *mmap;
    ctx->priv->alg_priv->si.sz = sizeof(ctx->priv->alg_priv->si);
    ctx->priv->init_flags = ctx->init_flags;

    if (ctx->config.dec)
    {
        /* Update the reference to the config structure to an internal copy. */
        ctx->priv->alg_priv->cfg = *ctx->config.dec;
        ctx->config.dec = &ctx->priv->alg_priv->cfg;
    }
}

static void *mmap_lkup(vpx_codec_alg_priv_t *ctx, unsigned int id)
{
    int i;

    for (i = 0; i < NELEMENTS(ctx->mmaps); i++)
        if (ctx->mmaps[i].id == id)
            return ctx->mmaps[i].base;

    return NULL;
}

static void vp8_finalize_mmaps(vpx_codec_alg_priv_t *ctx)
{
    /* nothing to clean up */
}

static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    /* This function only allocates space for the vpx_codec_alg_priv_t
     * structure. More memory may be required at the time the stream
     * information becomes known.
     */
    if (!ctx->priv)
    {
        vpx_codec_mmap_t mmap;

        mmap.id = vp8_mem_req_segs[0].id;
        mmap.sz = sizeof(vpx_codec_alg_priv_t);
        mmap.align = vp8_mem_req_segs[0].align;
        mmap.flags = vp8_mem_req_segs[0].flags;

        res = vp8_mmap_alloc(&mmap);

        if (!res)
        {
            vp8_init_ctx(ctx, &mmap);

            ctx->priv->alg_priv->defer_alloc = 1;
            /* post processing level initialized to do nothing */
        }
    }

    return res;
}

static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx)
{
    int i;

    vp8dx_remove_decompressor(ctx->pbi);

    for (i = NELEMENTS(ctx->mmaps) - 1; i >= 0; i--)
    {
        if (ctx->mmaps[i].dtor)
            ctx->mmaps[i].dtor(&ctx->mmaps[i]);
    }

    return VPX_CODEC_OK;
}

static vpx_codec_err_t vp8_peek_si(const uint8_t           *data,
                                   unsigned int             data_sz,
                                   vpx_codec_stream_info_t *si)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    if (data + data_sz <= data)
        res = VPX_CODEC_INVALID_PARAM;
    else
    {
        /* Parse uncompressed part of key frame header.
         * 3 bytes:- including version, frame type and an offset
         * 3 bytes:- sync code (0x9d, 0x01, 0x2a)
         * 4 bytes:- including image width and height in the lowest 14 bits
         *           of each 2-byte value.
         */
        si->is_kf = 0;

        if (data_sz >= 10 && !(data[0] & 0x01))  /* I-Frame */
        {
            const uint8_t *c = data + 3;
            si->is_kf = 1;

            /* vet via sync code */
            if (c[0] != 0x9d || c[1] != 0x01 || c[2] != 0x2a)
                res = VPX_CODEC_UNSUP_BITSTREAM;

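            /* The upper two bits of each 16-bit value carry the horizontal/
             * vertical scaling factor, so mask to the low 14 bits to recover
             * the frame dimensions.
             */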
            si->w = (c[3] | (c[4] << 8)) & 0x3fff;
            si->h = (c[5] | (c[6] << 8)) & 0x3fff;

            /*printf("w=%d, h=%d\n", si->w, si->h);*/
            if (!(si->h | si->w))
                res = VPX_CODEC_UNSUP_BITSTREAM;
        }
        else
            res = VPX_CODEC_UNSUP_BITSTREAM;
    }

    return res;

}

static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t    *ctx,
                                  vpx_codec_stream_info_t *si)
{

    unsigned int sz;

    if (si->sz >= sizeof(vp8_stream_info_t))
        sz = sizeof(vp8_stream_info_t);
    else
        sz = sizeof(vpx_codec_stream_info_t);

    memcpy(si, &ctx->si, sz);
    si->sz = sz;

    return VPX_CODEC_OK;
}


static vpx_codec_err_t
update_error_state(vpx_codec_alg_priv_t                 *ctx,
                   const struct vpx_internal_error_info *error)
{
    vpx_codec_err_t res;

    if ((res = error->error_code))
        ctx->base.err_detail = error->has_detail
                               ? error->detail
                               : NULL;

    return res;
}


static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t        *data,
                                  unsigned int          data_sz,
                                  void                 *user_priv,
                                  long                  deadline)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    ctx->img_avail = 0;

    /* Determine the stream parameters. Note that we rely on peek_si to
     * validate that we have a buffer that does not wrap around the top
     * of the heap.
     */
    if (!ctx->si.h)
        res = ctx->base.iface->dec.peek_si(data, data_sz, &ctx->si);


    /* Perform deferred allocations, if required */
    if (!res && ctx->defer_alloc)
    {
        int i;

        for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = ctx->si.w;
            cfg.h = ctx->si.h;
            ctx->mmaps[i].id = vp8_mem_req_segs[i].id;
            ctx->mmaps[i].sz = vp8_mem_req_segs[i].sz;
            ctx->mmaps[i].align = vp8_mem_req_segs[i].align;
            ctx->mmaps[i].flags = vp8_mem_req_segs[i].flags;

            if (!ctx->mmaps[i].sz)
                ctx->mmaps[i].sz = vp8_mem_req_segs[i].calc_sz(&cfg,
                                       ctx->base.init_flags);

            res = vp8_mmap_alloc(&ctx->mmaps[i]);
        }

        if (!res)
            vp8_finalize_mmaps(ctx);

        ctx->defer_alloc = 0;
    }

    /* Initialize the decoder instance on the first frame */
    if (!res && !ctx->decoder_init)
    {
        res = vp8_validate_mmaps(&ctx->si, ctx->mmaps, ctx->base.init_flags);

        if (!res)
        {
            VP8D_CONFIG oxcf;
            VP8D_PTR optr;

            vp8dx_initialize();

            oxcf.Width = ctx->si.w;
            oxcf.Height = ctx->si.h;
            oxcf.Version = 9;
            oxcf.postprocess = 0;
            oxcf.max_threads = ctx->cfg.threads;

            optr = vp8dx_create_decompressor(&oxcf);

            /* If postprocessing was enabled by the application and a
             * configuration has not been provided, default it.
             */
            if (!ctx->postproc_cfg_set
                && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
            {
                ctx->postproc_cfg.post_proc_flag =
                    VP8_DEBLOCK | VP8_DEMACROBLOCK;
                ctx->postproc_cfg.deblocking_level = 4;
                ctx->postproc_cfg.noise_level = 0;
            }

            if (!optr)
                res = VPX_CODEC_ERROR;
            else
                ctx->pbi = optr;
        }

        ctx->decoder_init = 1;
    }

    if (!res && ctx->pbi)
    {
        YV12_BUFFER_CONFIG sd;
        INT64 time_stamp = 0, time_end_stamp = 0;
        vp8_ppflags_t flags = {0};

        if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        {
            flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag
#if CONFIG_POSTPROC_VISUALIZER
                                   | ((ctx->dbg_color_ref_frame_flag != 0) ? VP8D_DEBUG_CLR_FRM_REF_BLKS : 0)
                                   | ((ctx->dbg_color_mb_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                   | ((ctx->dbg_color_b_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                                   | ((ctx->dbg_display_mv_flag != 0) ? VP8D_DEBUG_DRAW_MV : 0)
#endif
                                   ;
            flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
            flags.noise_level = ctx->postproc_cfg.noise_level;
#if CONFIG_POSTPROC_VISUALIZER
            flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag;
            flags.display_mb_modes_flag = ctx->dbg_color_mb_modes_flag;
            flags.display_b_modes_flag = ctx->dbg_color_b_modes_flag;
            flags.display_mv_flag = ctx->dbg_display_mv_flag;
#endif
        }

        if (vp8dx_receive_compressed_data(ctx->pbi, data_sz, data, deadline))
        {
            VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;
            res = update_error_state(ctx, &pbi->common.error);
        }

        if (!res && 0 == vp8dx_get_raw_frame(ctx->pbi, &sd, &time_stamp, &time_end_stamp, &flags))
        {
            /* Align width/height to a multiple of 16 (the macroblock size) */
            unsigned int a_w = (sd.y_width + 15) & ~15;
            unsigned int a_h = (sd.y_height + 15) & ~15;

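            /* Wrap the decoder's internal frame buffer (which includes the
             * border pixels) in a vpx_image_t, then restrict the visible
             * rectangle to the actual frame, excluding the border.
             */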
            vpx_img_wrap(&ctx->img, VPX_IMG_FMT_I420,
                         a_w + 2 * VP8BORDERINPIXELS,
                         a_h + 2 * VP8BORDERINPIXELS,
                         1,
                         sd.buffer_alloc);
            vpx_img_set_rect(&ctx->img,
                             VP8BORDERINPIXELS, VP8BORDERINPIXELS,
                             sd.y_width, sd.y_height);
            ctx->img.user_priv = user_priv;
            ctx->img_avail = 1;

        }
    }

    return res;
}

static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t *ctx,
                                  vpx_codec_iter_t     *iter)
{
    vpx_image_t *img = NULL;

    if (ctx->img_avail)
    {
        /* iter acts as a flip flop, so an image is only returned on the first
         * call to get_frame.
         */
        if (!(*iter))
        {
            img = &ctx->img;
            *iter = img;
        }
    }

    return img;
}


static
vpx_codec_err_t vp8_xma_get_mmap(const vpx_codec_ctx_t *ctx,
                                 vpx_codec_mmap_t      *mmap,
                                 vpx_codec_iter_t      *iter)
{
    vpx_codec_err_t  res;
    const mem_req_t *seg_iter = *iter;

    /* Get address of next segment request */
    do
    {
        if (!seg_iter)
            seg_iter = vp8_mem_req_segs;
        else if (seg_iter->id != VP8_SEG_MAX)
            seg_iter++;

        *iter = (vpx_codec_iter_t)seg_iter;

        if (seg_iter->id != VP8_SEG_MAX)
        {
            mmap->id = seg_iter->id;
            mmap->sz = seg_iter->sz;
            mmap->align = seg_iter->align;
            mmap->flags = seg_iter->flags;

            if (!seg_iter->sz)
                mmap->sz = seg_iter->calc_sz(ctx->config.dec, ctx->init_flags);

            res = VPX_CODEC_OK;
        }
        else
            res = VPX_CODEC_LIST_END;
    }
    while (!mmap->sz && res != VPX_CODEC_LIST_END);

    return res;
}

static vpx_codec_err_t vp8_xma_set_mmap(vpx_codec_ctx_t        *ctx,
                                        const vpx_codec_mmap_t *mmap)
{
    vpx_codec_err_t res = VPX_CODEC_MEM_ERROR;
    int i, done;

    if (!ctx->priv)
    {
        if (mmap->id == VP8_SEG_ALG_PRIV)
        {
            if (!ctx->priv)
            {
                vp8_init_ctx(ctx, mmap);
                res = VPX_CODEC_OK;
            }
        }
    }

    done = 1;

    if (!res && ctx->priv->alg_priv)
    {
        for (i = 0; i < NELEMENTS(ctx->priv->alg_priv->mmaps); i++)
        {
            if (ctx->priv->alg_priv->mmaps[i].id == mmap->id)
                if (!ctx->priv->alg_priv->mmaps[i].base)
                {
                    ctx->priv->alg_priv->mmaps[i] = *mmap;
                    res = VPX_CODEC_OK;
                }

            done &= (ctx->priv->alg_priv->mmaps[i].base != NULL);
        }
    }

    if (done && !res)
    {
        vp8_finalize_mmaps(ctx->priv->alg_priv);
        res = ctx->iface->init(ctx);
    }

    return res;
}

static vpx_codec_err_t image2yuvconfig(const vpx_image_t  *img,
                                       YV12_BUFFER_CONFIG *yv12)
{
    vpx_codec_err_t res = VPX_CODEC_OK;
    yv12->y_buffer = img->planes[VPX_PLANE_Y];
    yv12->u_buffer = img->planes[VPX_PLANE_U];
    yv12->v_buffer = img->planes[VPX_PLANE_V];

    yv12->y_width  = img->d_w;
    yv12->y_height = img->d_h;
    yv12->uv_width = yv12->y_width / 2;
    yv12->uv_height = yv12->y_height / 2;

    yv12->y_stride = img->stride[VPX_PLANE_Y];
    yv12->uv_stride = img->stride[VPX_PLANE_U];

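    /* Derive the border from the luma stride padding, assuming the padding is
     * split evenly between the left and right edges of the buffer.
     */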
    yv12->border  = (img->stride[VPX_PLANE_Y] - img->d_w) / 2;
    yv12->clrtype = (img->fmt == VPX_IMG_FMT_VPXI420 || img->fmt == VPX_IMG_FMT_VPXYV12);

    return res;
}


static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx,
                                         int ctr_id,
                                         va_list args)
{

    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

    if (data)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        vp8dx_set_reference(ctx->pbi, frame->frame_type, &sd);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx,
                                         int ctr_id,
                                         va_list args)
{

    vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);

    if (data)
    {
        vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
        YV12_BUFFER_CONFIG sd;

        image2yuvconfig(&frame->img, &sd);

        vp8dx_get_reference(ctx->pbi, frame->frame_type, &sd);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx,
                                        int ctr_id,
                                        va_list args)
{
    vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);
#if CONFIG_POSTPROC

    if (data)
    {
        ctx->postproc_cfg_set = 1;
        ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

#else
    return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t vp8_set_dbg_options(vpx_codec_alg_priv_t *ctx,
                                           int ctrl_id,
                                           va_list args)
{
#if CONFIG_POSTPROC_VISUALIZER && CONFIG_POSTPROC
    int data = va_arg(args, int);

#define MAP(id, var) case id: var = data; break;

    switch (ctrl_id)
    {
        MAP (VP8_SET_DBG_COLOR_REF_FRAME, ctx->dbg_color_ref_frame_flag);
        MAP (VP8_SET_DBG_COLOR_MB_MODES,  ctx->dbg_color_mb_modes_flag);
        MAP (VP8_SET_DBG_COLOR_B_MODES,   ctx->dbg_color_b_modes_flag);
        MAP (VP8_SET_DBG_DISPLAY_MV,      ctx->dbg_display_mv_flag);
    }

    return VPX_CODEC_OK;
#else
    return VPX_CODEC_INCAPABLE;
#endif
}

static vpx_codec_err_t vp8_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
                                                int ctrl_id,
                                                va_list args)
{
    int *update_info = va_arg(args, int *);
    VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;

    if (update_info)
    {
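        /* Combine the decoder's per-reference refresh flags into a bitmask of
         * VP8_LAST_FRAME / VP8_GOLD_FRAME / VP8_ALTR_FRAME values.
         */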
        *update_info = pbi->common.refresh_alt_ref_frame * (int) VP8_ALTR_FRAME
                       + pbi->common.refresh_golden_frame * (int) VP8_GOLD_FRAME
                       + pbi->common.refresh_last_frame * (int) VP8_LAST_FRAME;

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;
}


static vpx_codec_err_t vp8_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
                                               int ctrl_id,
                                               va_list args)
{

    int *corrupted = va_arg(args, int *);

    if (corrupted)
    {
        VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;
        *corrupted = pbi->common.frame_to_show->corrupted;

        return VPX_CODEC_OK;
    }
    else
        return VPX_CODEC_INVALID_PARAM;

}

vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] =
{
    {VP8_SET_REFERENCE,           vp8_set_reference},
    {VP8_COPY_REFERENCE,          vp8_get_reference},
    {VP8_SET_POSTPROC,            vp8_set_postproc},
    {VP8_SET_DBG_COLOR_REF_FRAME, vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_MB_MODES,  vp8_set_dbg_options},
    {VP8_SET_DBG_COLOR_B_MODES,   vp8_set_dbg_options},
    {VP8_SET_DBG_DISPLAY_MV,      vp8_set_dbg_options},
    {VP8D_GET_LAST_REF_UPDATES,   vp8_get_last_ref_updates},
    {VP8D_GET_FRAME_CORRUPTED,    vp8_get_frame_corrupted},
    { -1, NULL},
};


#ifndef VERSION_STRING
#define VERSION_STRING
#endif
CODEC_INTERFACE(vpx_codec_vp8_dx) =
{
    "WebM Project VP8 Decoder" VERSION_STRING,
    VPX_CODEC_INTERNAL_ABI_VERSION,
    VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC,
    /* vpx_codec_caps_t caps; */
    vp8_init,         /* vpx_codec_init_fn_t      init; */
    vp8_destroy,      /* vpx_codec_destroy_fn_t   destroy; */
    vp8_ctf_maps,     /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
    vp8_xma_get_mmap, /* vpx_codec_get_mmap_fn_t  get_mmap; */
    vp8_xma_set_mmap, /* vpx_codec_set_mmap_fn_t  set_mmap; */
    {
        vp8_peek_si,   /* vpx_codec_peek_si_fn_t   peek_si; */
        vp8_get_si,    /* vpx_codec_get_si_fn_t    get_si; */
        vp8_decode,    /* vpx_codec_decode_fn_t    decode; */
        vp8_get_frame, /* vpx_codec_frame_get_fn_t frame_get; */
    },
    { /* encoder functions */
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED
    }
};

/*
 * BEGIN BACKWARDS COMPATIBILITY SHIM.
 */
vpx_codec_iface_t vpx_codec_vp8_algo =
{
    "WebM Project VP8 Decoder (Deprecated API)" VERSION_STRING,
    VPX_CODEC_INTERNAL_ABI_VERSION,
    VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC,
    /* vpx_codec_caps_t caps; */
    vp8_init,         /* vpx_codec_init_fn_t      init; */
    vp8_destroy,      /* vpx_codec_destroy_fn_t   destroy; */
    vp8_ctf_maps,     /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
    vp8_xma_get_mmap, /* vpx_codec_get_mmap_fn_t  get_mmap; */
    vp8_xma_set_mmap, /* vpx_codec_set_mmap_fn_t  set_mmap; */
    {
        vp8_peek_si,   /* vpx_codec_peek_si_fn_t   peek_si; */
        vp8_get_si,    /* vpx_codec_get_si_fn_t    get_si; */
        vp8_decode,    /* vpx_codec_decode_fn_t    decode; */
        vp8_get_frame, /* vpx_codec_frame_get_fn_t frame_get; */
    },
    { /* encoder functions */
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED,
        NOT_IMPLEMENTED
    }
};