/*
 * VC-1 and WMV3 decoder
 * Copyright (c) 2011 Mashiat Sarker Shakkhar
 * Copyright (c) 2006-2007 Konstantin Shishkov
 * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * VC-1 and WMV3 decoder
 */

#include "config_components.h"

#include "avcodec.h"
#include "blockdsp.h"
#include "codec_internal.h"
#include "get_bits.h"
#include "hwconfig.h"
#include "internal.h"
#include "mpeg_er.h"
#include "mpegvideo.h"
#include "mpegvideodec.h"
#include "msmpeg4data.h"
#include "msmpeg4dec.h"
#include "profiles.h"
#include "vc1.h"
#include "vc1data.h"
#include "libavutil/avassert.h"

#if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER

typedef struct SpriteData {
    /**
     * Transform coefficients for both sprites in 16.16 fixed point format,
     * in the order they appear in the bitstream:
     * x scale
     * rotation 1 (unused)
     * x offset
     * rotation 2 (unused)
     * y scale
     * y offset
     * alpha
     */
    int coefs[2][7];

    int effect_type, effect_flag;
    int effect_pcount1, effect_pcount2;   ///< amount of effect parameters stored in effect_params
    int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
} SpriteData;

static inline int get_fp_val(GetBitContext* gb)
{
    return (get_bits_long(gb, 30) - (1 << 29)) << 1;
}
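/* Note: the bitstream stores each coefficient as val/2 + 2^29 in 30 bits, so
 * get_fp_val() recovers a signed 16.16 number; e.g. a raw field of
 * (1 << 29) + 0x18000 decodes to 0x30000, i.e. 3.0 in 16.16 notation. */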

static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
{
    c[1] = c[3] = 0;

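    /* The 2-bit transform type selects which coefficients are coded
     * explicitly: 0 is translation only, 1 uses one common scale factor,
     * 2 codes separate x/y scales and 3 additionally codes the (currently
     * unhandled) rotation terms c[1] and c[3]. */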
    switch (get_bits(gb, 2)) {
    case 0:
        c[0] = 1 << 16;
        c[2] = get_fp_val(gb);
        c[4] = 1 << 16;
        break;
    case 1:
        c[0] = c[4] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        break;
    case 2:
        c[0] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
        break;
    case 3:
        c[0] = get_fp_val(gb);
        c[1] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[3] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
        break;
    }
    c[5] = get_fp_val(gb);
    if (get_bits1(gb))
        c[6] = get_fp_val(gb);
    else
        c[6] = 1 << 16;
}

static int vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
{
    AVCodecContext *avctx = v->s.avctx;
    int sprite, i;

    for (sprite = 0; sprite <= v->two_sprites; sprite++) {
        vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
        if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
            avpriv_request_sample(avctx, "Non-zero rotation coefficients");
        av_log(avctx, AV_LOG_DEBUG, sprite ? "S2:" : "S1:");
        for (i = 0; i < 7; i++)
            av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
                   sd->coefs[sprite][i] / (1<<16),
                   (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
        av_log(avctx, AV_LOG_DEBUG, "\n");
    }

    skip_bits(gb, 2);
    if (sd->effect_type = get_bits_long(gb, 30)) {
        switch (sd->effect_pcount1 = get_bits(gb, 4)) {
        case 7:
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            break;
        case 14:
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
            break;
        default:
            for (i = 0; i < sd->effect_pcount1; i++)
                sd->effect_params1[i] = get_fp_val(gb);
        }
        if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
            // effect 13 is simple alpha blending and matches the opacity above
            av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
            for (i = 0; i < sd->effect_pcount1; i++)
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params1[i] / (1 << 16),
                       (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
            av_log(avctx, AV_LOG_DEBUG, "\n");
        }

        sd->effect_pcount2 = get_bits(gb, 16);
        if (sd->effect_pcount2 > 10) {
            av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
            return AVERROR_INVALIDDATA;
        } else if (sd->effect_pcount2) {
            i = -1;
            av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
            while (++i < sd->effect_pcount2) {
                sd->effect_params2[i] = get_fp_val(gb);
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params2[i] / (1 << 16),
                       (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
            }
            av_log(avctx, AV_LOG_DEBUG, "\n");
        }
    }
    if (sd->effect_flag = get_bits1(gb))
        av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");

    if (get_bits_count(gb) >= gb->size_in_bits +
            (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0)) {
        av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
        return AVERROR_INVALIDDATA;
    }
    if (get_bits_count(gb) < gb->size_in_bits - 8)
        av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");

    return 0;
}

static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
{
    int i, plane, row, sprite;
    int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } };
    uint8_t* src_h[2][2];
    int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
    int ysub[2];
    MpegEncContext *s = &v->s;

    for (i = 0; i <= v->two_sprites; i++) {
        xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
        xadv[i] = sd->coefs[i][0];
        if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
            xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);

        yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
        yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
    }
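    /* alpha is the 16.16 opacity taken from the second coefficient set,
     * clipped to 16 bits; it is handed to the sprite_v_double_* routines
     * below to blend the two sprites. */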
    alpha = av_clip_uint16(sd->coefs[1][6]);

    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
        int width = v->output_width>>!!plane;

        for (row = 0; row < v->output_height>>!!plane; row++) {
            uint8_t *dst = v->sprite_output_frame->data[plane] +
                           v->sprite_output_frame->linesize[plane] * row;

            for (sprite = 0; sprite <= v->two_sprites; sprite++) {
                uint8_t *iplane = s->current_picture.f->data[plane];
                int iline = s->current_picture.f->linesize[plane];
                int ycoord = yoff[sprite] + yadv[sprite] * row;
                int yline = ycoord >> 16;
                int next_line;
                ysub[sprite] = ycoord & 0xFFFF;
                if (sprite) {
                    iplane = s->last_picture.f->data[plane];
                    iline = s->last_picture.f->linesize[plane];
                }
                next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
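                /* Fast path: with an integer x offset and a unity x step the
                 * source rows can be used as-is; otherwise run the horizontal
                 * resampler and cache the two rows (yline and yline + 1) per
                 * sprite in sr_rows[]. */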
                if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
                    src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
                    if (ysub[sprite])
                        src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
                } else {
                    if (sr_cache[sprite][0] != yline) {
                        if (sr_cache[sprite][1] == yline) {
                            FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
                            FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
                        } else {
                            v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
                            sr_cache[sprite][0] = yline;
                        }
                    }
                    if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
                        v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
                                           iplane + next_line, xoff[sprite],
                                           xadv[sprite], width);
                        sr_cache[sprite][1] = yline + 1;
                    }
                    src_h[sprite][0] = v->sr_rows[sprite][0];
                    src_h[sprite][1] = v->sr_rows[sprite][1];
                }
            }

            if (!v->two_sprites) {
                if (ysub[0]) {
                    v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
                } else {
                    memcpy(dst, src_h[0][0], width);
                }
            } else {
                if (ysub[0] && ysub[1]) {
                    v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], src_h[1][1], ysub[1], alpha, width);
                } else if (ysub[0]) {
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], alpha, width);
                } else if (ysub[1]) {
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
                                                       src_h[0][0], (1<<16)-1-alpha, width);
                } else {
                    v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);
                }
            }
        }

        if (!plane) {
            for (i = 0; i <= v->two_sprites; i++) {
                xoff[i] >>= 1;
                yoff[i] >>= 1;
            }
        }

    }
}


static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
{
    int ret;
    MpegEncContext *s = &v->s;
    AVCodecContext *avctx = s->avctx;
    SpriteData sd;

    memset(&sd, 0, sizeof(sd));

    ret = vc1_parse_sprites(v, gb, &sd);
    if (ret < 0)
        return ret;

    if (!s->current_picture.f || !s->current_picture.f->data[0]) {
        av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
        return AVERROR_UNKNOWN;
    }

    if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
        av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
        v->two_sprites = 0;
    }

    av_frame_unref(v->sprite_output_frame);
    if ((ret = ff_get_buffer(avctx, v->sprite_output_frame, 0)) < 0)
        return ret;

    vc1_draw_sprites(v, &sd);

    return 0;
}

static void vc1_sprite_flush(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *f = s->current_picture.f;
    int plane, i;

    /* Windows Media Image codecs have a convergence interval of two keyframes.
       Since we can't enforce it, clear to black the missing sprite. This is
       wrong but it looks better than doing nothing. */

    if (f && f->data[0])
        for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++)
            for (i = 0; i < v->sprite_height>>!!plane; i++)
                memset(f->data[plane] + i * f->linesize[plane],
                       plane ? 128 : 0, f->linesize[plane]);
}

#endif

av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
{
    MpegEncContext *s = &v->s;
    int i, ret = AVERROR(ENOMEM);
    int mb_height = FFALIGN(s->mb_height, 2);

    /* Allocate mb bitplanes */
    v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->direct_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->fieldtx_plane = av_mallocz(s->mb_stride * mb_height);
    v->acpred_plane = av_malloc (s->mb_stride * mb_height);
    v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
    if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->forward_mb_plane ||
        !v->fieldtx_plane || !v->acpred_plane || !v->over_flags_plane)
        goto error;

    v->n_allocated_blks = s->mb_width + 2;
    v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
    v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 3 * s->mb_stride);
    if (!v->block || !v->cbp_base)
        goto error;
    v->cbp = v->cbp_base + 2 * s->mb_stride;
    v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 3 * s->mb_stride);
    if (!v->ttblk_base)
        goto error;
    v->ttblk = v->ttblk_base + 2 * s->mb_stride;
    v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 3 * s->mb_stride);
    if (!v->is_intra_base)
        goto error;
    v->is_intra = v->is_intra_base + 2 * s->mb_stride;
    v->luma_mv_base = av_mallocz(sizeof(v->luma_mv_base[0]) * 3 * s->mb_stride);
    if (!v->luma_mv_base)
        goto error;
    v->luma_mv = v->luma_mv_base + 2 * s->mb_stride;

    /* allocate block type info laid out so that it can be indexed via s->block_index[] */
    v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    if (!v->mb_type_base)
        goto error;
    v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
    v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
    v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);
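    /* i.e. roughly one plane of 8x8-block flags for luma (b8_stride wide)
     * followed by two macroblock-granularity planes for the chroma
     * components, which is what the three pointers above select. */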

    /* allocate memory to store block level MV info */
    v->blk_mv_type_base = av_mallocz( s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    if (!v->blk_mv_type_base)
        goto error;
    v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
    v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
    if (!v->mv_f_base)
        goto error;
    v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
    v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
    if (!v->mv_f_next_base)
        goto error;
    v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
    v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);

    if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        for (i = 0; i < 4; i++)
            if (!(v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width)))
                goto error;
    }

    ret = ff_intrax8_common_init(s->avctx, &v->x8, &s->idsp,
                                 s->block, s->block_last_index,
                                 s->mb_width, s->mb_height);
    if (ret < 0)
        goto error;

    return 0;

error:
    ff_vc1_decode_end(s->avctx);
    return ret;
}

av_cold void ff_vc1_init_transposed_scantables(VC1Context *v)
{
    int i;
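    /* transpose() below swaps the row and column of a 6-bit scan position,
     * e.g. 0x23 (row 4, column 3) becomes 0x1C (row 3, column 4), turning the
     * row-major WMV1 scantables into their transposed variants. */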
    for (i = 0; i < 64; i++) {
#define transpose(x) (((x) >> 3) | (((x) & 7) << 3))
        v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
        v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
        v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
        v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
        v->zzi_8x8[i] = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
    }
    v->left_blk_sh = 0;
    v->top_blk_sh = 3;
}

/** Initialize a VC1/WMV3 decoder
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 * @todo TODO: Decipher remaining bits in extra_data
 */
static av_cold int vc1_decode_init(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    GetBitContext gb;
    int ret;

    /* save the container output size for WMImage */
    v->output_width = avctx->width;
    v->output_height = avctx->height;

    if (!avctx->extradata_size || !avctx->extradata)
        return AVERROR_INVALIDDATA;
    v->s.avctx = avctx;

    ff_vc1_init_common(v);

    if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {
        int count = 0;

        // looks like WMV3 has a sequence header stored in the extradata
        // advanced sequence header may be before the first frame
        // the last byte of the extradata is a version number, 1 for the
        // samples we can decode

        ret = init_get_bits8(&gb, avctx->extradata, avctx->extradata_size);
        if (ret < 0)
            return ret;

        if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0)
            return ret;

        if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE && !v->res_sprite) {
            avpriv_request_sample(avctx, "Non sprite WMV3IMAGE");
            return AVERROR_PATCHWELCOME;
        }

        count = avctx->extradata_size*8 - get_bits_count(&gb);
        if (count > 0) {
            av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
                   count, get_bits_long(&gb, FFMIN(count, 32)));
        } else if (count < 0) {
            av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
        }
    } else { // VC1/WVC1/WVP2
        const uint8_t *start = avctx->extradata;
        uint8_t *end = avctx->extradata + avctx->extradata_size;
        const uint8_t *next;
        int size, buf2_size;
        uint8_t *buf2 = NULL;
        int seq_initialized = 0, ep_initialized = 0;

        if (avctx->extradata_size < 16) {
            av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
            return AVERROR_INVALIDDATA;
        }

        buf2 = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!buf2)
            return AVERROR(ENOMEM);

        start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv
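        /* The remainder is expected to be a series of IDUs, each introduced by
         * a 32-bit start code; only sequence header (VC1_CODE_SEQHDR) and
         * entry point (VC1_CODE_ENTRYPOINT) IDUs are parsed here, after
         * unescaping their payloads. */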
        next = start;
        for (; next < end; start = next) {
            next = find_next_marker(start + 4, end);
            size = next - start - 4;
            if (size <= 0)
                continue;
            buf2_size = v->vc1dsp.vc1_unescape_buffer(start + 4, size, buf2);
            init_get_bits(&gb, buf2, buf2_size * 8);
            switch (AV_RB32(start)) {
            case VC1_CODE_SEQHDR:
                if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0) {
                    av_free(buf2);
                    return ret;
                }
                seq_initialized = 1;
                break;
            case VC1_CODE_ENTRYPOINT:
                if ((ret = ff_vc1_decode_entry_point(avctx, v, &gb)) < 0) {
                    av_free(buf2);
                    return ret;
                }
                ep_initialized = 1;
                break;
            }
        }
        av_free(buf2);
        if (!seq_initialized || !ep_initialized) {
            av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
            return AVERROR_INVALIDDATA;
        }
        v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);
    }

    avctx->profile = v->profile;
    if (v->profile == PROFILE_ADVANCED)
        avctx->level = v->level;

    if (!CONFIG_GRAY || !(avctx->flags & AV_CODEC_FLAG_GRAY))
        avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
    else {
        avctx->pix_fmt = AV_PIX_FMT_GRAY8;
        if (avctx->color_range == AVCOL_RANGE_UNSPECIFIED)
            avctx->color_range = AVCOL_RANGE_MPEG;
    }

    // ensure static VLC tables are initialized
    if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
        return ret;
    if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
        return ret;
    // Hack to ensure the above functions will be called
    // again once we know all necessary settings.
    // That this is necessary might indicate a bug.
    ff_vc1_decode_end(avctx);

    ff_blockdsp_init(&s->bdsp, avctx);
    ff_h264chroma_init(&v->h264chroma, 8);
    ff_qpeldsp_init(&s->qdsp);

    avctx->has_b_frames = !!avctx->max_b_frames;

    if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
        avctx->color_primaries = v->color_prim;
    if (v->transfer_char == 1 || v->transfer_char == 7)
        avctx->color_trc = v->transfer_char;
    if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
        avctx->colorspace = v->matrix_coef;
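    /* The accepted values match the AVCOL_* numbering (1 is BT.709 for all
     * three properties, 5 is BT.470BG primaries, 6 is SMPTE 170M
     * primaries/matrix, 7 is SMPTE 240M transfer/matrix), so they can be
     * passed through unchanged. */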

    s->mb_width = (avctx->coded_width + 15) >> 4;
    s->mb_height = (avctx->coded_height + 15) >> 4;

    if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
        ff_vc1_init_transposed_scantables(v);
    } else {
        memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);
        v->left_blk_sh = 3;
        v->top_blk_sh = 0;
    }

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        v->sprite_width = avctx->coded_width;
        v->sprite_height = avctx->coded_height;

        avctx->coded_width = avctx->width = v->output_width;
        avctx->coded_height = avctx->height = v->output_height;

        // prevent 16.16 overflows
        if (v->sprite_width > 1 << 14 ||
            v->sprite_height > 1 << 14 ||
            v->output_width > 1 << 14 ||
            v->output_height > 1 << 14) {
            return AVERROR_INVALIDDATA;
        }

        if ((v->sprite_width&1) || (v->sprite_height&1)) {
            avpriv_request_sample(avctx, "odd sprites support");
            return AVERROR_PATCHWELCOME;
        }
    }
    return 0;
}

/** Close a VC1/WMV3 decoder
 * @warning Initial try at using MpegEncContext stuff
 */
av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    int i;

    av_frame_free(&v->sprite_output_frame);

    for (i = 0; i < 4; i++)
        av_freep(&v->sr_rows[i >> 1][i & 1]);
    ff_mpv_common_end(&v->s);
    av_freep(&v->mv_type_mb_plane);
    av_freep(&v->direct_mb_plane);
    av_freep(&v->forward_mb_plane);
    av_freep(&v->fieldtx_plane);
    av_freep(&v->acpred_plane);
    av_freep(&v->over_flags_plane);
    av_freep(&v->mb_type_base);
    av_freep(&v->blk_mv_type_base);
    av_freep(&v->mv_f_base);
    av_freep(&v->mv_f_next_base);
    av_freep(&v->block);
    av_freep(&v->cbp_base);
    av_freep(&v->ttblk_base);
    av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
    av_freep(&v->luma_mv_base);
    ff_intrax8_common_end(&v->x8);
    return 0;
}


/** Decode a VC1/WMV3 frame
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 */
static int vc1_decode_frame(AVCodecContext *avctx, AVFrame *pict,
                            int *got_frame, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size, n_slices = 0, i, ret;
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    uint8_t *buf2 = NULL;
    const uint8_t *buf_start = buf, *buf_start_second_field = NULL;
    int mb_height, n_slices1=-1;
    struct {
        uint8_t *buf;
        GetBitContext gb;
        int mby_start;
        const uint8_t *rawbuf;
        int raw_size;
    } *slices = NULL, *tmp;

    v->second_field = 0;

    if(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
        s->low_delay = 1;

    /* no supplementary picture */
    if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
        /* special case for last picture */
        if (s->low_delay == 0 && s->next_picture_ptr) {
            if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
                return ret;
            s->next_picture_ptr = NULL;

            *got_frame = 1;
        }

        return buf_size;
    }

    //for advanced profile we may need to parse and unescape data
    if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        int buf_size2 = 0;
        buf2 = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!buf2)
            return AVERROR(ENOMEM);

        if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
            const uint8_t *start, *end, *next;
            int size;

            next = buf;
            for (start = buf, end = buf + buf_size; next < end; start = next) {
                next = find_next_marker(start + 4, end);
                size = next - start - 4;
                if (size <= 0) continue;
                switch (AV_RB32(start)) {
                case VC1_CODE_FRAME:
                    if (avctx->hwaccel)
                        buf_start = start;
                    buf_size2 = v->vc1dsp.vc1_unescape_buffer(start + 4, size, buf2);
                    break;
                case VC1_CODE_FIELD: {
                    int buf_size3;
                    if (avctx->hwaccel)
                        buf_start_second_field = start;
                    tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                    if (!tmp) {
                        ret = AVERROR(ENOMEM);
                        goto err;
                    }
                    slices = tmp;
                    slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
                    if (!slices[n_slices].buf) {
                        ret = AVERROR(ENOMEM);
                        goto err;
                    }
                    buf_size3 = v->vc1dsp.vc1_unescape_buffer(start + 4, size,
                                                              slices[n_slices].buf);
                    init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                                  buf_size3 << 3);
                    slices[n_slices].mby_start = avctx->coded_height + 31 >> 5;
                    slices[n_slices].rawbuf = start;
                    slices[n_slices].raw_size = size + 4;
                    n_slices1 = n_slices - 1; // index of the last slice of the first field
                    n_slices++;
                    break;
                }
                case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
                    buf_size2 = v->vc1dsp.vc1_unescape_buffer(start + 4, size, buf2);
                    init_get_bits(&s->gb, buf2, buf_size2 * 8);
                    ff_vc1_decode_entry_point(avctx, v, &s->gb);
                    break;
                case VC1_CODE_SLICE: {
                    int buf_size3;
                    tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                    if (!tmp) {
                        ret = AVERROR(ENOMEM);
                        goto err;
                    }
                    slices = tmp;
                    slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
                    if (!slices[n_slices].buf) {
                        ret = AVERROR(ENOMEM);
                        goto err;
                    }
                    buf_size3 = v->vc1dsp.vc1_unescape_buffer(start + 4, size,
                                                              slices[n_slices].buf);
                    init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                                  buf_size3 << 3);
                    slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
                    slices[n_slices].rawbuf = start;
                    slices[n_slices].raw_size = size + 4;
                    n_slices++;
                    break;
                }
                }
            }
        } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
            const uint8_t *divider;
            int buf_size3;

            divider = find_next_marker(buf, buf + buf_size);
            if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
                av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
                ret = AVERROR_INVALIDDATA;
                goto err;
            } else { // found field marker, unescape second field
                if (avctx->hwaccel)
                    buf_start_second_field = divider;
                tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                if (!tmp) {
                    ret = AVERROR(ENOMEM);
                    goto err;
                }
                slices = tmp;
                slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
                if (!slices[n_slices].buf) {
                    ret = AVERROR(ENOMEM);
                    goto err;
                }
                buf_size3 = v->vc1dsp.vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
                init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                              buf_size3 << 3);
                slices[n_slices].mby_start = s->mb_height + 1 >> 1;
                slices[n_slices].rawbuf = divider;
                slices[n_slices].raw_size = buf + buf_size - divider;
                n_slices1 = n_slices - 1;
                n_slices++;
            }
            buf_size2 = v->vc1dsp.vc1_unescape_buffer(buf, divider - buf, buf2);
        } else {
            buf_size2 = v->vc1dsp.vc1_unescape_buffer(buf, buf_size, buf2);
        }
        init_get_bits(&s->gb, buf2, buf_size2*8);
    } else{
        ret = init_get_bits8(&s->gb, buf, buf_size);
        if (ret < 0)
            return ret;
    }

    if (v->res_sprite) {
        v->new_sprite = !get_bits1(&s->gb);
        v->two_sprites = get_bits1(&s->gb);
        /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
           we're using the sprite compositor. These are intentionally kept separate
           so you can get the raw sprites by using the wmv3 decoder for WMVP or
           the vc1 one for WVP2 */
        if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
            if (v->new_sprite) {
                // switch AVCodecContext parameters to those of the sprites
                avctx->width = avctx->coded_width = v->sprite_width;
                avctx->height = avctx->coded_height = v->sprite_height;
            } else {
                goto image;
            }
        }
    }

    if (s->context_initialized &&
        (s->width != avctx->coded_width ||
         s->height != avctx->coded_height)) {
        ff_vc1_decode_end(avctx);
    }

    if (!s->context_initialized) {
        if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
            goto err;
        if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0) {
            ff_mpv_common_end(s);
            goto err;
        }

        s->low_delay = !avctx->has_b_frames || v->res_sprite;

        if (v->profile == PROFILE_ADVANCED) {
            if(avctx->coded_width<=1 || avctx->coded_height<=1) {
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            s->h_edge_pos = avctx->coded_width;
            s->v_edge_pos = avctx->coded_height;
        }
    }

    // do parse frame header
    v->pic_header_flag = 0;
    v->first_pic_header_flag = 1;
    if (v->profile < PROFILE_ADVANCED) {
        if ((ret = ff_vc1_parse_frame_header(v, &s->gb)) < 0) {
            goto err;
        }
    } else {
        if ((ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
            goto err;
        }
    }
    v->first_pic_header_flag = 0;

    if (avctx->debug & FF_DEBUG_PICT_INFO)
        av_log(v->s.avctx, AV_LOG_DEBUG, "pict_type: %c\n", av_get_picture_type_char(s->pict_type));

    if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
        && s->pict_type != AV_PICTURE_TYPE_I) {
        av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
        && v->field_mode) {
        av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected Frames not Fields\n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if ((s->mb_height >> v->field_mode) == 0) {
        av_log(v->s.avctx, AV_LOG_ERROR, "image too short\n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    // for skipping the frame
    s->current_picture.f->pict_type = s->pict_type;
    s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;

    /* skip B-frames if we don't have reference frames */
    if (!s->last_picture_ptr && s->pict_type == AV_PICTURE_TYPE_B) {
        av_log(v->s.avctx, AV_LOG_DEBUG, "Skipping B frame without reference frames\n");
        goto end;
    }
    if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
        (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
        avctx->skip_frame >= AVDISCARD_ALL) {
        goto end;
    }

    if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {
        goto err;
    }

    v->s.current_picture_ptr->field_picture = v->field_mode;
    v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
    v->s.current_picture_ptr->f->top_field_first = v->tff;

    // process pulldown flags
    s->current_picture_ptr->f->repeat_pict = 0;
    // Pulldown flags are only valid when 'broadcast' has been set.
    // So ticks_per_frame will be 2
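    // (repeat_pict counts extra field times: RFF repeats one field, while
    // RPTFRM repeats rptfrm whole frames, i.e. 2 * rptfrm extra fields.)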
    if (v->rff) {
        // repeat field
        s->current_picture_ptr->f->repeat_pict = 1;
    } else if (v->rptfrm) {
        // repeat frames
        s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;
    }

    s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
    s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;

    if (avctx->hwaccel) {
        s->mb_y = 0;
        if (v->field_mode && buf_start_second_field) {
            // decode first field
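            // PICT_TOP_FIELD/PICT_BOTTOM_FIELD are 1/2, so the expression below
            // selects the top field when TFF is set and the bottom field otherwise.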
            s->picture_structure = PICT_BOTTOM_FIELD - v->tff;
            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
                goto err;

            if (n_slices1 == -1) {
                // no slices, decode the field as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
                    goto err;
            } else {
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)
                    goto err;

                for (i = 0 ; i < n_slices1 + 1; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)
                                goto err;
                            continue;
                        }
                    }

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
                        goto err;
                }
            }

            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
                goto err;

            // decode second field
            s->gb = slices[n_slices1 + 1].gb;
            s->mb_y = slices[n_slices1 + 1].mby_start;
            s->picture_structure = PICT_TOP_FIELD + v->tff;
            v->second_field = 1;
            v->pic_header_flag = 0;
            if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed\n");
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            v->s.current_picture_ptr->f->pict_type = v->s.pict_type;

            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
                goto err;

            if (n_slices - n_slices1 == 2) {
                // no slices, decode the field as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
                    goto err;
            } else {
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, slices[n_slices1 + 2].rawbuf - buf_start_second_field)) < 0)
                    goto err;

                for (i = n_slices1 + 2; i < n_slices; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)
                                goto err;
                            continue;
                        }
                    }

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
                        goto err;
                }
            }

            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
                goto err;
        } else {
            s->picture_structure = PICT_FRAME;
            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
                goto err;

            if (n_slices == 0) {
                // no slices, decode the frame as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
                    goto err;
            } else {
                // decode the frame part as the first slice
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)
                    goto err;

                // and process the slices as additional slices afterwards
                for (i = 0 ; i < n_slices; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)
                                goto err;
                            continue;
                        }
                    }

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
                        goto err;
                }
            }
            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
                goto err;
        }
    } else {
        int header_ret = 0;

        ff_mpeg_er_frame_start(s);

        v->end_mb_x = s->mb_width;
        if (v->field_mode) {
            s->current_picture.f->linesize[0] <<= 1;
            s->current_picture.f->linesize[1] <<= 1;
            s->current_picture.f->linesize[2] <<= 1;
            s->linesize <<= 1;
            s->uvlinesize <<= 1;
        }
        mb_height = s->mb_height >> v->field_mode;

        av_assert0 (mb_height > 0);

        for (i = 0; i <= n_slices; i++) {
            if (i > 0 && slices[i - 1].mby_start >= mb_height) {
                if (v->field_mode <= 0) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
                           "picture boundary (%d >= %d)\n", i,
                           slices[i - 1].mby_start, mb_height);
                    continue;
                }
                v->second_field = 1;
                av_assert0((s->mb_height & 1) == 0);
                v->blocks_off = s->b8_stride * (s->mb_height&~1);
                v->mb_off = s->mb_stride * s->mb_height >> 1;
            } else {
                v->second_field = 0;
                v->blocks_off = 0;
                v->mb_off = 0;
            }
            if (i) {
                v->pic_header_flag = 0;
                if (v->field_mode && i == n_slices1 + 2) {
                    if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
                        av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
                        ret = AVERROR_INVALIDDATA;
                        if (avctx->err_recognition & AV_EF_EXPLODE)
                            goto err;
                        continue;
                    }
                } else if (get_bits1(&s->gb)) {
                    v->pic_header_flag = 1;
                    if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
                        av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                        ret = AVERROR_INVALIDDATA;
                        if (avctx->err_recognition & AV_EF_EXPLODE)
                            goto err;
                        continue;
                    }
                }
            }
            if (header_ret < 0)
                continue;
            s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
            if (!v->field_mode || v->second_field)
                s->end_mb_y = (i == n_slices ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
            else {
                if (i >= n_slices) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "first field slice count too large\n");
                    continue;
                }
                s->end_mb_y = (i == n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
            }
            if (s->end_mb_y <= s->start_mb_y) {
                av_log(v->s.avctx, AV_LOG_ERROR, "end mb y %d %d invalid\n", s->end_mb_y, s->start_mb_y);
                continue;
            }
            if (((s->pict_type == AV_PICTURE_TYPE_P && !v->p_frame_skipped) ||
                 (s->pict_type == AV_PICTURE_TYPE_B && !v->bi_type)) &&
                !v->cbpcy_vlc) {
                av_log(v->s.avctx, AV_LOG_ERROR, "missing cbpcy_vlc\n");
                continue;
            }
            ff_vc1_decode_blocks(v);
            if (i != n_slices) {
                s->gb = slices[i].gb;
            }
        }
        if (v->field_mode) {
            v->second_field = 0;
            s->current_picture.f->linesize[0] >>= 1;
            s->current_picture.f->linesize[1] >>= 1;
            s->current_picture.f->linesize[2] >>= 1;
            s->linesize >>= 1;
            s->uvlinesize >>= 1;
            if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
                FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
                FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
            }
        }
        ff_dlog(s->avctx, "Consumed %i/%i bits\n",
                get_bits_count(&s->gb), s->gb.size_in_bits);
//      if (get_bits_count(&s->gb) > buf_size * 8)
//          return -1;
        if(s->er.error_occurred && s->pict_type == AV_PICTURE_TYPE_B) {
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
        if ( !v->field_mode
            && avctx->codec_id != AV_CODEC_ID_WMV3IMAGE
            && avctx->codec_id != AV_CODEC_ID_VC1IMAGE)
            ff_er_frame_end(&s->er);
    }

    ff_mpv_frame_end(s);

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
image:
        avctx->width = avctx->coded_width = v->output_width;
        avctx->height = avctx->coded_height = v->output_height;
        if (avctx->skip_frame >= AVDISCARD_NONREF)
            goto end;
        if (!v->sprite_output_frame &&
            !(v->sprite_output_frame = av_frame_alloc())) {
            ret = AVERROR(ENOMEM);
            goto err;
        }
#if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
        if ((ret = vc1_decode_sprites(v, &s->gb)) < 0)
            goto err;
#endif
        if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
            goto err;
        *got_frame = 1;
    } else {
        if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
            if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
                goto err;
            if (!v->field_mode)
                ff_print_debug_info(s, s->current_picture_ptr, pict);
            *got_frame = 1;
        } else if (s->last_picture_ptr) {
            if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
                goto err;
            if (!v->field_mode)
                ff_print_debug_info(s, s->last_picture_ptr, pict);
            *got_frame = 1;
        }
    }

end:
    av_free(buf2);
    for (i = 0; i < n_slices; i++)
        av_free(slices[i].buf);
    av_free(slices);
    return buf_size;

err:
    av_free(buf2);
    for (i = 0; i < n_slices; i++)
        av_free(slices[i].buf);
    av_free(slices);
    return ret;
}


static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
#if CONFIG_VC1_DXVA2_HWACCEL
    AV_PIX_FMT_DXVA2_VLD,
#endif
#if CONFIG_VC1_D3D11VA_HWACCEL
    AV_PIX_FMT_D3D11VA_VLD,
    AV_PIX_FMT_D3D11,
#endif
#if CONFIG_VC1_NVDEC_HWACCEL
    AV_PIX_FMT_CUDA,
#endif
#if CONFIG_VC1_VAAPI_HWACCEL
    AV_PIX_FMT_VAAPI,
#endif
#if CONFIG_VC1_VDPAU_HWACCEL
    AV_PIX_FMT_VDPAU,
#endif
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_NONE
};

const FFCodec ff_vc1_decoder = {
    .p.name = "vc1",
    .p.long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
    .p.type = AVMEDIA_TYPE_VIDEO,
    .p.id = AV_CODEC_ID_VC1,
    .priv_data_size = sizeof(VC1Context),
    .init = vc1_decode_init,
    .close = ff_vc1_decode_end,
    FF_CODEC_DECODE_CB(vc1_decode_frame),
    .flush = ff_mpeg_flush,
    .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
    .p.pix_fmts = vc1_hwaccel_pixfmt_list_420,
    .hw_configs = (const AVCodecHWConfigInternal *const []) {
#if CONFIG_VC1_DXVA2_HWACCEL
        HWACCEL_DXVA2(vc1),
#endif
#if CONFIG_VC1_D3D11VA_HWACCEL
        HWACCEL_D3D11VA(vc1),
#endif
#if CONFIG_VC1_D3D11VA2_HWACCEL
        HWACCEL_D3D11VA2(vc1),
#endif
#if CONFIG_VC1_NVDEC_HWACCEL
        HWACCEL_NVDEC(vc1),
#endif
#if CONFIG_VC1_VAAPI_HWACCEL
        HWACCEL_VAAPI(vc1),
#endif
#if CONFIG_VC1_VDPAU_HWACCEL
        HWACCEL_VDPAU(vc1),
#endif
        NULL
    },
    .p.profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};

#if CONFIG_WMV3_DECODER
const FFCodec ff_wmv3_decoder = {
    .p.name = "wmv3",
    .p.long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
    .p.type = AVMEDIA_TYPE_VIDEO,
    .p.id = AV_CODEC_ID_WMV3,
    .priv_data_size = sizeof(VC1Context),
    .init = vc1_decode_init,
    .close = ff_vc1_decode_end,
    FF_CODEC_DECODE_CB(vc1_decode_frame),
    .flush = ff_mpeg_flush,
    .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
    .p.pix_fmts = vc1_hwaccel_pixfmt_list_420,
    .hw_configs = (const AVCodecHWConfigInternal *const []) {
#if CONFIG_WMV3_DXVA2_HWACCEL
        HWACCEL_DXVA2(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA_HWACCEL
        HWACCEL_D3D11VA(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA2_HWACCEL
        HWACCEL_D3D11VA2(wmv3),
#endif
#if CONFIG_WMV3_NVDEC_HWACCEL
        HWACCEL_NVDEC(wmv3),
#endif
#if CONFIG_WMV3_VAAPI_HWACCEL
        HWACCEL_VAAPI(wmv3),
#endif
#if CONFIG_WMV3_VDPAU_HWACCEL
        HWACCEL_VDPAU(wmv3),
#endif
        NULL
    },
    .p.profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};
#endif

#if CONFIG_WMV3IMAGE_DECODER
const FFCodec ff_wmv3image_decoder = {
    .p.name = "wmv3image",
    .p.long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
    .p.type = AVMEDIA_TYPE_VIDEO,
    .p.id = AV_CODEC_ID_WMV3IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init = vc1_decode_init,
    .close = ff_vc1_decode_end,
    FF_CODEC_DECODE_CB(vc1_decode_frame),
    .p.capabilities = AV_CODEC_CAP_DR1,
    .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
    .flush = vc1_sprite_flush,
    .p.pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_NONE
    },
};
#endif

#if CONFIG_VC1IMAGE_DECODER
const FFCodec ff_vc1image_decoder = {
    .p.name = "vc1image",
    .p.long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
    .p.type = AVMEDIA_TYPE_VIDEO,
    .p.id = AV_CODEC_ID_VC1IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init = vc1_decode_init,
    .close = ff_vc1_decode_end,
    FF_CODEC_DECODE_CB(vc1_decode_frame),
    .p.capabilities = AV_CODEC_CAP_DR1,
    .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
    .flush = vc1_sprite_flush,
    .p.pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_NONE
    },
};
#endif