1 /*
2 * VC-1 and WMV3 decoder
3 * Copyright (c) 2011 Mashiat Sarker Shakkhar
4 * Copyright (c) 2006-2007 Konstantin Shishkov
5 * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
6 *
7 * This file is part of FFmpeg.
8 *
9 * FFmpeg is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 2.1 of the License, or (at your option) any later version.
13 *
14 * FFmpeg is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
18 *
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with FFmpeg; if not, write to the Free Software
21 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22 */
23
24 /**
25 * @file
26 * VC-1 and WMV3 decoder
27 */
28
29 #include "avcodec.h"
30 #include "blockdsp.h"
31 #include "get_bits.h"
32 #include "hwconfig.h"
33 #include "internal.h"
34 #include "mpeg_er.h"
35 #include "mpegvideo.h"
36 #include "msmpeg4.h"
37 #include "msmpeg4data.h"
38 #include "profiles.h"
39 #include "vc1.h"
40 #include "vc1data.h"
41 #include "libavutil/avassert.h"
42
43
44 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
45
/**
 * Per-frame sprite state for the WMV3IMAGE/VC1IMAGE compositor:
 * the affine transform of each of the (up to two) sprites plus the
 * optional post-processing effect parameters parsed from the bitstream.
 */
typedef struct SpriteData {
    /**
     * Transform coefficients for both sprites in 16.16 fixed point format,
     * in the order they appear in the bitstream:
     * x scale
     * rotation 1 (unused)
     * x offset
     * rotation 2 (unused)
     * y scale
     * y offset
     * alpha
     */
    int coefs[2][7];

    int effect_type, effect_flag;
    int effect_pcount1, effect_pcount2; ///< amount of effect parameters stored in effect_params
    int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
} SpriteData;
64
/**
 * Read one 16.16 fixed-point value from the bitstream.
 *
 * The field is coded as a 30-bit unsigned value biased by 2^29; recentre it
 * around zero and double it.  Multiply instead of left-shifting: shifting a
 * negative value is undefined behaviour in C, while the product is always
 * representable (|x| <= 2^29, so 2*x fits in 31 bits).
 */
static inline int get_fp_val(GetBitContext* gb)
{
    return (get_bits_long(gb, 30) - (1 << 29)) * 2;
}
69
/**
 * Parse one sprite affine-transform record from the bitstream.
 *
 * @param gb bitstream reader positioned at the transform record
 * @param c  receives the 7 coefficients in 16.16 fixed point, laid out as
 *           in SpriteData.coefs: x scale, rotation 1, x offset, rotation 2,
 *           y scale, y offset, alpha
 */
static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
{
    c[1] = c[3] = 0; // rotation terms default to 0; only mode 3 codes them

    // 2-bit mode selects which coefficients are explicitly coded
    switch (get_bits(gb, 2)) {
    case 0: // no scaling, x offset only
        c[0] = 1 << 16;
        c[2] = get_fp_val(gb);
        c[4] = 1 << 16;
        break;
    case 1: // uniform scale (one value for both axes) + x offset
        c[0] = c[4] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        break;
    case 2: // independent x/y scale + x offset
        c[0] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
        break;
    case 3: // full set including the rotation terms
        c[0] = get_fp_val(gb);
        c[1] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[3] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
        break;
    }
    c[5] = get_fp_val(gb); // y offset is always present
    if (get_bits1(gb))     // optional alpha; defaults to 1.0 in 16.16
        c[6] = get_fp_val(gb);
    else
        c[6] = 1 << 16;
}
103
/**
 * Parse the sprite transforms and optional post-processing effect
 * parameters that precede the picture data in a WMV3IMAGE/VC1IMAGE frame.
 *
 * Fills @p sd with the transform coefficients of one or two sprites
 * (depending on v->two_sprites) and the effect parameters, logging them
 * at debug level.
 *
 * @return 0 on success, AVERROR_INVALIDDATA if too many effect parameters
 *         are signalled or the bit reader overran the buffer
 */
static int vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
{
    AVCodecContext *avctx = v->s.avctx;
    int sprite, i;

    for (sprite = 0; sprite <= v->two_sprites; sprite++) {
        vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
        // rotation is not implemented by the compositor below
        if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
            avpriv_request_sample(avctx, "Non-zero rotation coefficients");
        av_log(avctx, AV_LOG_DEBUG, sprite ? "S2:" : "S1:");
        for (i = 0; i < 7; i++)
            av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
                   sd->coefs[sprite][i] / (1<<16),
                   (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
        av_log(avctx, AV_LOG_DEBUG, "\n");
    }

    skip_bits(gb, 2);
    // extra parentheses: assignment intentionally used as the condition
    if ((sd->effect_type = get_bits_long(gb, 30))) {
        switch ((sd->effect_pcount1 = get_bits(gb, 4))) {
        case 7:  // one full transform record
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            break;
        case 14: // two full transform records
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
            break;
        default: // plain list of fixed-point values (pcount1 <= 15)
            for (i = 0; i < sd->effect_pcount1; i++)
                sd->effect_params1[i] = get_fp_val(gb);
        }
        if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
            // effect 13 is simple alpha blending and matches the opacity above
            av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
            for (i = 0; i < sd->effect_pcount1; i++)
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params1[i] / (1 << 16),
                       (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
            av_log(avctx, AV_LOG_DEBUG, "\n");
        }

        sd->effect_pcount2 = get_bits(gb, 16);
        if (sd->effect_pcount2 > 10) { // effect_params2 has room for 10 entries
            av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
            return AVERROR_INVALIDDATA;
        } else if (sd->effect_pcount2) {
            i = -1;
            av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
            while (++i < sd->effect_pcount2) {
                sd->effect_params2[i] = get_fp_val(gb);
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params2[i] / (1 << 16),
                       (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
            }
            av_log(avctx, AV_LOG_DEBUG, "\n");
        }
    }
    if ((sd->effect_flag = get_bits1(gb)))
        av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");

    // WMV3IMAGE is granted a 64-bit slack past the end before we call it
    // an overrun; VC1IMAGE gets none
    if (get_bits_count(gb) >= gb->size_in_bits +
       (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0)) {
        av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
        return AVERROR_INVALIDDATA;
    }
    if (get_bits_count(gb) < gb->size_in_bits - 8)
        av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");

    return 0;
}
174
/**
 * Composite one or two sprites into v->sprite_output_frame.
 *
 * The primary sprite is read from current_picture, the secondary (when
 * v->two_sprites is set) from last_picture.  Each output row is produced
 * by horizontally resampling the source row(s) (cached in v->sr_rows),
 * optionally interpolating between two source lines vertically, and — for
 * two sprites — alpha-blending the results.
 */
static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
{
    int i, plane, row, sprite;
    // which source line each sr_rows[sprite][0/1] currently holds (-1 = none)
    int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } };
    uint8_t* src_h[2][2];
    int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
    int ysub[2];
    MpegEncContext *s = &v->s;

    // derive clipped 16.16 offsets/step sizes per sprite from the transform
    for (i = 0; i <= v->two_sprites; i++) {
        // NB: shift binds looser than '-', so this is (sprite_width-1) << 16
        xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
        xadv[i] = sd->coefs[i][0];
        // clamp the step unless the sprite maps 1:1 onto the output width
        if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
            xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);

        yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
        yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
    }
    // blend factor comes from the second sprite's alpha coefficient
    alpha = av_clip_uint16(sd->coefs[1][6]);

    // luma only in gray mode, otherwise all three planes
    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
        int width = v->output_width>>!!plane; // chroma planes are half width

        for (row = 0; row < v->output_height>>!!plane; row++) {
            uint8_t *dst = v->sprite_output_frame->data[plane] +
                           v->sprite_output_frame->linesize[plane] * row;

            // prepare the horizontally-scaled source row(s) for each sprite
            for (sprite = 0; sprite <= v->two_sprites; sprite++) {
                uint8_t *iplane = s->current_picture.f->data[plane];
                int iline = s->current_picture.f->linesize[plane];
                int ycoord = yoff[sprite] + yadv[sprite] * row;
                int yline = ycoord >> 16;       // integer source line
                int next_line;
                ysub[sprite] = ycoord & 0xFFFF; // vertical interpolation fraction
                if (sprite) { // second sprite comes from the previous picture
                    iplane = s->last_picture.f->data[plane];
                    iline  = s->last_picture.f->linesize[plane];
                }
                next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
                if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
                    // integral offset and 1:1 scale: read the plane directly
                    src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
                    if (ysub[sprite])
                        src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
                } else {
                    // fractional sampling: go through the cached scaled rows
                    if (sr_cache[sprite][0] != yline) {
                        if (sr_cache[sprite][1] == yline) {
                            // previous row's "next line" is this row's line
                            FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
                            FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
                        } else {
                            v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
                            sr_cache[sprite][0] = yline;
                        }
                    }
                    if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
                        v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
                                           iplane + next_line, xoff[sprite],
                                           xadv[sprite], width);
                        sr_cache[sprite][1] = yline + 1;
                    }
                    src_h[sprite][0] = v->sr_rows[sprite][0];
                    src_h[sprite][1] = v->sr_rows[sprite][1];
                }
            }

            // vertical interpolation / blending into the output row
            if (!v->two_sprites) {
                if (ysub[0]) {
                    v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
                } else {
                    memcpy(dst, src_h[0][0], width);
                }
            } else {
                if (ysub[0] && ysub[1]) {
                    v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], src_h[1][1], ysub[1], alpha, width);
                } else if (ysub[0]) {
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], alpha, width);
                } else if (ysub[1]) {
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
                                                       src_h[0][0], (1<<16)-1-alpha, width);
                } else {
                    v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);
                }
            }
        }

        // after luma, halve the offsets for the chroma planes
        if (!plane) {
            for (i = 0; i <= v->two_sprites; i++) {
                xoff[i] >>= 1;
                yoff[i] >>= 1;
            }
        }

    }
}
270
271
/**
 * Decode a WMV3IMAGE/VC1IMAGE sprite frame: parse the sprite headers,
 * check that the required reference sprites exist, grab an output buffer
 * and composite the sprites into it.
 *
 * @return 0 on success, a negative AVERROR value otherwise
 */
static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
{
    MpegEncContext *s = &v->s;
    AVCodecContext *avctx = s->avctx;
    SpriteData sd;
    int ret;

    memset(&sd, 0, sizeof(sd));

    if ((ret = vc1_parse_sprites(v, gb, &sd)) < 0)
        return ret;

    // the primary sprite must have been decoded into current_picture
    if (!s->current_picture.f || !s->current_picture.f->data[0]) {
        av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
        return AVERROR_UNKNOWN;
    }

    // degrade gracefully to single-sprite mode if the reference is missing
    if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
        av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
        v->two_sprites = 0;
    }

    av_frame_unref(v->sprite_output_frame);
    ret = ff_get_buffer(avctx, v->sprite_output_frame, 0);
    if (ret < 0)
        return ret;

    vc1_draw_sprites(v, &sd);

    return 0;
}
303
/**
 * Flush callback for the sprite decoders: blank the current sprite so a
 * missing reference does not leave stale image data on screen.
 */
static void vc1_sprite_flush(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *f = s->current_picture.f;
    int plane, row;

    /* Windows Media Image codecs have a convergence interval of two keyframes.
       Since we can't enforce it, clear to black the missing sprite. This is
       wrong but it looks better than doing nothing. */

    if (!f || !f->data[0])
        return;

    // luma to 0, chroma to 128 (black); luma only in gray mode
    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
        const int fill = plane ? 128 : 0;
        for (row = 0; row < v->sprite_height >> !!plane; row++)
            memset(f->data[plane] + row * f->linesize[plane],
                   fill, f->linesize[plane]);
    }
}
321
322 #endif
323
ff_vc1_decode_init_alloc_tables(VC1Context * v)324 av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
325 {
326 MpegEncContext *s = &v->s;
327 int i, ret = AVERROR(ENOMEM);
328 int mb_height = FFALIGN(s->mb_height, 2);
329
330 /* Allocate mb bitplanes */
331 v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
332 v->direct_mb_plane = av_malloc (s->mb_stride * mb_height);
333 v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
334 v->fieldtx_plane = av_mallocz(s->mb_stride * mb_height);
335 v->acpred_plane = av_malloc (s->mb_stride * mb_height);
336 v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
337 if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->forward_mb_plane ||
338 !v->fieldtx_plane || !v->acpred_plane || !v->over_flags_plane)
339 goto error;
340
341 v->n_allocated_blks = s->mb_width + 2;
342 v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
343 v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 3 * s->mb_stride);
344 if (!v->block || !v->cbp_base)
345 goto error;
346 v->cbp = v->cbp_base + 2 * s->mb_stride;
347 v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 3 * s->mb_stride);
348 if (!v->ttblk_base)
349 goto error;
350 v->ttblk = v->ttblk_base + 2 * s->mb_stride;
351 v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 3 * s->mb_stride);
352 if (!v->is_intra_base)
353 goto error;
354 v->is_intra = v->is_intra_base + 2 * s->mb_stride;
355 v->luma_mv_base = av_mallocz(sizeof(v->luma_mv_base[0]) * 3 * s->mb_stride);
356 if (!v->luma_mv_base)
357 goto error;
358 v->luma_mv = v->luma_mv_base + 2 * s->mb_stride;
359
360 /* allocate block type info in that way so it could be used with s->block_index[] */
361 v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
362 if (!v->mb_type_base)
363 goto error;
364 v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
365 v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
366 v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);
367
368 /* allocate memory to store block level MV info */
369 v->blk_mv_type_base = av_mallocz( s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
370 if (!v->blk_mv_type_base)
371 goto error;
372 v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
373 v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
374 if (!v->mv_f_base)
375 goto error;
376 v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
377 v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
378 v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
379 if (!v->mv_f_next_base)
380 goto error;
381 v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
382 v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
383
384 if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
385 for (i = 0; i < 4; i++)
386 if (!(v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width)))
387 return AVERROR(ENOMEM);
388 }
389
390 ret = ff_intrax8_common_init(s->avctx, &v->x8, &s->idsp,
391 s->block, s->block_last_index,
392 s->mb_width, s->mb_height);
393 if (ret < 0)
394 goto error;
395
396 return 0;
397
398 error:
399 ff_vc1_decode_end(s->avctx);
400 return ret;
401 }
402
ff_vc1_init_transposed_scantables(VC1Context * v)403 av_cold void ff_vc1_init_transposed_scantables(VC1Context *v)
404 {
405 int i;
406 for (i = 0; i < 64; i++) {
407 #define transpose(x) (((x) >> 3) | (((x) & 7) << 3))
408 v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
409 v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
410 v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
411 v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
412 v->zzi_8x8[i] = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
413 }
414 v->left_blk_sh = 0;
415 v->top_blk_sh = 3;
416 }
417
/** Initialize a VC1/WMV3 decoder
 * Parses the codec headers from extradata (a raw sequence header for
 * WMV3/WMV3IMAGE, start-code delimited units for VC1/VC1IMAGE), then sets
 * up pixel format, DSP contexts and, for the image codecs, the sprite
 * dimensions.
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 * @todo TODO: Decipher remaining bits in extra_data
 */
static av_cold int vc1_decode_init(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    GetBitContext gb;
    int ret;

    /* save the container output size for WMImage */
    v->output_width  = avctx->width;
    v->output_height = avctx->height;

    if (!avctx->extradata_size || !avctx->extradata)
        return AVERROR_INVALIDDATA;
    v->s.avctx = avctx;

    if ((ret = ff_vc1_init_common(v)) < 0)
        return ret;

    if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {
        int count = 0;

        // looks like WMV3 has a sequence header stored in the extradata
        // advanced sequence header may be before the first frame
        // the last byte of the extradata is a version number, 1 for the
        // samples we can decode

        init_get_bits(&gb, avctx->extradata, avctx->extradata_size*8);

        if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0)
            return ret;

        if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE && !v->res_sprite) {
            avpriv_request_sample(avctx, "Non sprite WMV3IMAGE");
            return AVERROR_PATCHWELCOME;
        }

        // report unparsed trailing bits (or an overread) for diagnostics
        count = avctx->extradata_size*8 - get_bits_count(&gb);
        if (count > 0) {
            av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
                   count, get_bits_long(&gb, FFMIN(count, 32)));
        } else if (count < 0) {
            av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
        }
    } else { // VC1/WVC1/WVP2
        const uint8_t *start = avctx->extradata;
        uint8_t *end = avctx->extradata + avctx->extradata_size;
        const uint8_t *next;
        int size, buf2_size;
        uint8_t *buf2 = NULL;
        int seq_initialized = 0, ep_initialized = 0;

        if (avctx->extradata_size < 16) {
            av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
            return AVERROR_INVALIDDATA;
        }

        // scratch buffer for the unescaped payload of each header unit
        buf2 = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!buf2)
            return AVERROR(ENOMEM);

        start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv
        next = start;
        // walk the start-code delimited units, decoding the sequence header
        // and entry point; other unit types are ignored here
        for (; next < end; start = next) {
            next = find_next_marker(start + 4, end);
            size = next - start - 4;
            if (size <= 0)
                continue;
            buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
            init_get_bits(&gb, buf2, buf2_size * 8);
            switch (AV_RB32(start)) {
            case VC1_CODE_SEQHDR:
                if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0) {
                    av_free(buf2);
                    return ret;
                }
                seq_initialized = 1;
                break;
            case VC1_CODE_ENTRYPOINT:
                if ((ret = ff_vc1_decode_entry_point(avctx, v, &gb)) < 0) {
                    av_free(buf2);
                    return ret;
                }
                ep_initialized = 1;
                break;
            }
        }
        av_free(buf2);
        // both units are mandatory for advanced profile decoding
        if (!seq_initialized || !ep_initialized) {
            av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
            return AVERROR_INVALIDDATA;
        }
        v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);
    }

    avctx->profile = v->profile;
    if (v->profile == PROFILE_ADVANCED)
        avctx->level = v->level;

    if (!CONFIG_GRAY || !(avctx->flags & AV_CODEC_FLAG_GRAY))
        avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
    else {
        avctx->pix_fmt = AV_PIX_FMT_GRAY8;
        if (avctx->color_range == AVCOL_RANGE_UNSPECIFIED)
            avctx->color_range = AVCOL_RANGE_MPEG;
    }

    // ensure static VLC tables are initialized
    if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
        return ret;
    if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
        return ret;
    // Hack to ensure the above functions will be called
    // again once we know all necessary settings.
    // That this is necessary might indicate a bug.
    ff_vc1_decode_end(avctx);

    ff_blockdsp_init(&s->bdsp, avctx);
    ff_h264chroma_init(&v->h264chroma, 8);
    ff_qpeldsp_init(&s->qdsp);

    // Must happen after calling ff_vc1_decode_end
    // to avoid de-allocating the sprite_output_frame
    v->sprite_output_frame = av_frame_alloc();
    if (!v->sprite_output_frame)
        return AVERROR(ENOMEM);

    avctx->has_b_frames = !!avctx->max_b_frames;

    // export colorimetry only for values we can map directly
    if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
        avctx->color_primaries = v->color_prim;
    if (v->transfer_char == 1 || v->transfer_char == 7)
        avctx->color_trc = v->transfer_char;
    if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
        avctx->colorspace = v->matrix_coef;

    s->mb_width  = (avctx->coded_width  + 15) >> 4;
    s->mb_height = (avctx->coded_height + 15) >> 4;

    if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
        ff_vc1_init_transposed_scantables(v);
    } else {
        memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);
        v->left_blk_sh = 3;
        v->top_blk_sh  = 0;
    }

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        // the coded size is the sprite size; the visible output keeps the
        // container dimensions saved at the top of this function
        v->sprite_width  = avctx->coded_width;
        v->sprite_height = avctx->coded_height;

        avctx->coded_width  = avctx->width  = v->output_width;
        avctx->coded_height = avctx->height = v->output_height;

        // prevent 16.16 overflows
        if (v->sprite_width  > 1 << 14 ||
            v->sprite_height > 1 << 14 ||
            v->output_width  > 1 << 14 ||
            v->output_height > 1 << 14) {
            ret = AVERROR_INVALIDDATA;
            goto error;
        }

        if ((v->sprite_width&1) || (v->sprite_height&1)) {
            avpriv_request_sample(avctx, "odd sprites support");
            ret = AVERROR_PATCHWELCOME;
            goto error;
        }
    }
    return 0;
error:
    av_frame_free(&v->sprite_output_frame);
    return ret;
}
595
/** Close a VC1/WMV3 decoder
 * Frees everything allocated by ff_vc1_decode_init_alloc_tables() and
 * vc1_decode_init(); safe to call on a partially initialized context
 * since av_freep()/av_frame_free() accept NULL.
 * @warning Initial try at using MpegEncContext stuff
 */
av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    int i;

    av_frame_free(&v->sprite_output_frame);

    // sprite row caches (allocated for WMV3IMAGE/VC1IMAGE only)
    for (i = 0; i < 4; i++)
        av_freep(&v->sr_rows[i >> 1][i & 1]);
    av_freep(&v->hrd_rate);
    av_freep(&v->hrd_buffer);
    ff_mpv_common_end(&v->s);
    // macroblock bitplanes
    av_freep(&v->mv_type_mb_plane);
    av_freep(&v->direct_mb_plane);
    av_freep(&v->forward_mb_plane);
    av_freep(&v->fieldtx_plane);
    av_freep(&v->acpred_plane);
    av_freep(&v->over_flags_plane);
    // MB/block type and motion vector metadata
    av_freep(&v->mb_type_base);
    av_freep(&v->blk_mv_type_base);
    av_freep(&v->mv_f_base);
    av_freep(&v->mv_f_next_base);
    // block coefficient buffers
    av_freep(&v->block);
    av_freep(&v->cbp_base);
    av_freep(&v->ttblk_base);
    av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
    av_freep(&v->luma_mv_base);
    ff_intrax8_common_end(&v->x8);
    return 0;
}
629
630
631 /** Decode a VC1/WMV3 frame
632 * @todo TODO: Handle VC-1 IDUs (Transport level?)
633 */
vc1_decode_frame(AVCodecContext * avctx,void * data,int * got_frame,AVPacket * avpkt)634 static int vc1_decode_frame(AVCodecContext *avctx, void *data,
635 int *got_frame, AVPacket *avpkt)
636 {
637 const uint8_t *buf = avpkt->data;
638 int buf_size = avpkt->size, n_slices = 0, i, ret;
639 VC1Context *v = avctx->priv_data;
640 MpegEncContext *s = &v->s;
641 AVFrame *pict = data;
642 uint8_t *buf2 = NULL;
643 const uint8_t *buf_start = buf, *buf_start_second_field = NULL;
644 int mb_height, n_slices1=-1;
645 struct {
646 uint8_t *buf;
647 GetBitContext gb;
648 int mby_start;
649 const uint8_t *rawbuf;
650 int raw_size;
651 } *slices = NULL, *tmp;
652
653 v->second_field = 0;
654
655 if(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
656 s->low_delay = 1;
657
658 /* no supplementary picture */
659 if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
660 /* special case for last picture */
661 if (s->low_delay == 0 && s->next_picture_ptr) {
662 if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
663 return ret;
664 s->next_picture_ptr = NULL;
665
666 *got_frame = 1;
667 }
668
669 return buf_size;
670 }
671
672 //for advanced profile we may need to parse and unescape data
673 if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
674 int buf_size2 = 0;
675 buf2 = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
676 if (!buf2)
677 return AVERROR(ENOMEM);
678
679 if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
680 const uint8_t *start, *end, *next;
681 int size;
682
683 next = buf;
684 for (start = buf, end = buf + buf_size; next < end; start = next) {
685 next = find_next_marker(start + 4, end);
686 size = next - start - 4;
687 if (size <= 0) continue;
688 switch (AV_RB32(start)) {
689 case VC1_CODE_FRAME:
690 if (avctx->hwaccel)
691 buf_start = start;
692 buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
693 break;
694 case VC1_CODE_FIELD: {
695 int buf_size3;
696 if (avctx->hwaccel)
697 buf_start_second_field = start;
698 tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
699 if (!tmp) {
700 ret = AVERROR(ENOMEM);
701 goto err;
702 }
703 slices = tmp;
704 slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
705 if (!slices[n_slices].buf) {
706 ret = AVERROR(ENOMEM);
707 goto err;
708 }
709 buf_size3 = vc1_unescape_buffer(start + 4, size,
710 slices[n_slices].buf);
711 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
712 buf_size3 << 3);
713 slices[n_slices].mby_start = avctx->coded_height + 31 >> 5;
714 slices[n_slices].rawbuf = start;
715 slices[n_slices].raw_size = size + 4;
716 n_slices1 = n_slices - 1; // index of the last slice of the first field
717 n_slices++;
718 break;
719 }
720 case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
721 buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
722 init_get_bits(&s->gb, buf2, buf_size2 * 8);
723 ff_vc1_decode_entry_point(avctx, v, &s->gb);
724 break;
725 case VC1_CODE_SLICE: {
726 int buf_size3;
727 tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
728 if (!tmp) {
729 ret = AVERROR(ENOMEM);
730 goto err;
731 }
732 slices = tmp;
733 slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
734 if (!slices[n_slices].buf) {
735 ret = AVERROR(ENOMEM);
736 goto err;
737 }
738 buf_size3 = vc1_unescape_buffer(start + 4, size,
739 slices[n_slices].buf);
740 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
741 buf_size3 << 3);
742 slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
743 slices[n_slices].rawbuf = start;
744 slices[n_slices].raw_size = size + 4;
745 n_slices++;
746 break;
747 }
748 }
749 }
750 } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
751 const uint8_t *divider;
752 int buf_size3;
753
754 divider = find_next_marker(buf, buf + buf_size);
755 if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
756 av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
757 ret = AVERROR_INVALIDDATA;
758 goto err;
759 } else { // found field marker, unescape second field
760 if (avctx->hwaccel)
761 buf_start_second_field = divider;
762 tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
763 if (!tmp) {
764 ret = AVERROR(ENOMEM);
765 goto err;
766 }
767 slices = tmp;
768 slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
769 if (!slices[n_slices].buf) {
770 ret = AVERROR(ENOMEM);
771 goto err;
772 }
773 buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
774 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
775 buf_size3 << 3);
776 slices[n_slices].mby_start = s->mb_height + 1 >> 1;
777 slices[n_slices].rawbuf = divider;
778 slices[n_slices].raw_size = buf + buf_size - divider;
779 n_slices1 = n_slices - 1;
780 n_slices++;
781 }
782 buf_size2 = vc1_unescape_buffer(buf, divider - buf, buf2);
783 } else {
784 buf_size2 = vc1_unescape_buffer(buf, buf_size, buf2);
785 }
786 init_get_bits(&s->gb, buf2, buf_size2*8);
787 } else
788 init_get_bits(&s->gb, buf, buf_size*8);
789
790 if (v->res_sprite) {
791 v->new_sprite = !get_bits1(&s->gb);
792 v->two_sprites = get_bits1(&s->gb);
793 /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
794 we're using the sprite compositor. These are intentionally kept separate
795 so you can get the raw sprites by using the wmv3 decoder for WMVP or
796 the vc1 one for WVP2 */
797 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
798 if (v->new_sprite) {
799 // switch AVCodecContext parameters to those of the sprites
800 avctx->width = avctx->coded_width = v->sprite_width;
801 avctx->height = avctx->coded_height = v->sprite_height;
802 } else {
803 goto image;
804 }
805 }
806 }
807
808 if (s->context_initialized &&
809 (s->width != avctx->coded_width ||
810 s->height != avctx->coded_height)) {
811 ff_vc1_decode_end(avctx);
812 }
813
814 if (!s->context_initialized) {
815 if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
816 goto err;
817 if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0) {
818 ff_mpv_common_end(s);
819 goto err;
820 }
821
822 s->low_delay = !avctx->has_b_frames || v->res_sprite;
823
824 if (v->profile == PROFILE_ADVANCED) {
825 if(avctx->coded_width<=1 || avctx->coded_height<=1) {
826 ret = AVERROR_INVALIDDATA;
827 goto err;
828 }
829 s->h_edge_pos = avctx->coded_width;
830 s->v_edge_pos = avctx->coded_height;
831 }
832 }
833
834 // do parse frame header
835 v->pic_header_flag = 0;
836 v->first_pic_header_flag = 1;
837 if (v->profile < PROFILE_ADVANCED) {
838 if ((ret = ff_vc1_parse_frame_header(v, &s->gb)) < 0) {
839 goto err;
840 }
841 } else {
842 if ((ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
843 goto err;
844 }
845 }
846 v->first_pic_header_flag = 0;
847
848 if (avctx->debug & FF_DEBUG_PICT_INFO)
849 av_log(v->s.avctx, AV_LOG_DEBUG, "pict_type: %c\n", av_get_picture_type_char(s->pict_type));
850
851 if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
852 && s->pict_type != AV_PICTURE_TYPE_I) {
853 av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
854 ret = AVERROR_INVALIDDATA;
855 goto err;
856 }
857 if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
858 && v->field_mode) {
859 av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected Frames not Fields\n");
860 ret = AVERROR_INVALIDDATA;
861 goto err;
862 }
863 if ((s->mb_height >> v->field_mode) == 0) {
864 av_log(v->s.avctx, AV_LOG_ERROR, "image too short\n");
865 ret = AVERROR_INVALIDDATA;
866 goto err;
867 }
868
869 // for skipping the frame
870 s->current_picture.f->pict_type = s->pict_type;
871 s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
872
873 /* skip B-frames if we don't have reference frames */
874 if (!s->last_picture_ptr && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
875 av_log(v->s.avctx, AV_LOG_DEBUG, "Skipping B frame without reference frames\n");
876 goto end;
877 }
878 if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
879 (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
880 avctx->skip_frame >= AVDISCARD_ALL) {
881 goto end;
882 }
883
884 if (s->next_p_frame_damaged) {
885 if (s->pict_type == AV_PICTURE_TYPE_B)
886 goto end;
887 else
888 s->next_p_frame_damaged = 0;
889 }
890
891 if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {
892 goto err;
893 }
894
895 v->s.current_picture_ptr->field_picture = v->field_mode;
896 v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
897 v->s.current_picture_ptr->f->top_field_first = v->tff;
898
899 // process pulldown flags
900 s->current_picture_ptr->f->repeat_pict = 0;
901 // Pulldown flags are only valid when 'broadcast' has been set.
902 // So ticks_per_frame will be 2
903 if (v->rff) {
904 // repeat field
905 s->current_picture_ptr->f->repeat_pict = 1;
906 } else if (v->rptfrm) {
907 // repeat frames
908 s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;
909 }
910
911 s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
912 s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;
913
914 if (avctx->hwaccel) {
915 s->mb_y = 0;
916 if (v->field_mode && buf_start_second_field) {
917 // decode first field
918 s->picture_structure = PICT_BOTTOM_FIELD - v->tff;
919 if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
920 goto err;
921
922 if (n_slices1 == -1) {
923 // no slices, decode the field as-is
924 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
925 goto err;
926 } else {
927 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)
928 goto err;
929
930 for (i = 0 ; i < n_slices1 + 1; i++) {
931 s->gb = slices[i].gb;
932 s->mb_y = slices[i].mby_start;
933
934 v->pic_header_flag = get_bits1(&s->gb);
935 if (v->pic_header_flag) {
936 if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
937 av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
938 ret = AVERROR_INVALIDDATA;
939 if (avctx->err_recognition & AV_EF_EXPLODE)
940 goto err;
941 continue;
942 }
943 }
944
945 if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
946 goto err;
947 }
948 }
949
950 if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
951 goto err;
952
953 // decode second field
954 s->gb = slices[n_slices1 + 1].gb;
955 s->mb_y = slices[n_slices1 + 1].mby_start;
956 s->picture_structure = PICT_TOP_FIELD + v->tff;
957 v->second_field = 1;
958 v->pic_header_flag = 0;
959 if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
960 av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed");
961 ret = AVERROR_INVALIDDATA;
962 goto err;
963 }
964 v->s.current_picture_ptr->f->pict_type = v->s.pict_type;
965
966 if ((ret = avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
967 goto err;
968
969 if (n_slices - n_slices1 == 2) {
970 // no slices, decode the field as-is
971 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
972 goto err;
973 } else {
974 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, slices[n_slices1 + 2].rawbuf - buf_start_second_field)) < 0)
975 goto err;
976
977 for (i = n_slices1 + 2; i < n_slices; i++) {
978 s->gb = slices[i].gb;
979 s->mb_y = slices[i].mby_start;
980
981 v->pic_header_flag = get_bits1(&s->gb);
982 if (v->pic_header_flag) {
983 if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
984 av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
985 ret = AVERROR_INVALIDDATA;
986 if (avctx->err_recognition & AV_EF_EXPLODE)
987 goto err;
988 continue;
989 }
990 }
991
992 if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
993 goto err;
994 }
995 }
996
997 if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
998 goto err;
999 } else {
1000 s->picture_structure = PICT_FRAME;
1001 if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
1002 goto err;
1003
1004 if (n_slices == 0) {
1005 // no slices, decode the frame as-is
1006 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
1007 goto err;
1008 } else {
1009 // decode the frame part as the first slice
1010 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)
1011 goto err;
1012
1013 // and process the slices as additional slices afterwards
1014 for (i = 0 ; i < n_slices; i++) {
1015 s->gb = slices[i].gb;
1016 s->mb_y = slices[i].mby_start;
1017
1018 v->pic_header_flag = get_bits1(&s->gb);
1019 if (v->pic_header_flag) {
1020 if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
1021 av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
1022 ret = AVERROR_INVALIDDATA;
1023 if (avctx->err_recognition & AV_EF_EXPLODE)
1024 goto err;
1025 continue;
1026 }
1027 }
1028
1029 if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
1030 goto err;
1031 }
1032 }
1033 if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
1034 goto err;
1035 }
1036 } else {
1037 int header_ret = 0;
1038
1039 ff_mpeg_er_frame_start(s);
1040
1041 v->end_mb_x = s->mb_width;
1042 if (v->field_mode) {
1043 s->current_picture.f->linesize[0] <<= 1;
1044 s->current_picture.f->linesize[1] <<= 1;
1045 s->current_picture.f->linesize[2] <<= 1;
1046 s->linesize <<= 1;
1047 s->uvlinesize <<= 1;
1048 }
1049 mb_height = s->mb_height >> v->field_mode;
1050
1051 av_assert0 (mb_height > 0);
1052
1053 for (i = 0; i <= n_slices; i++) {
1054 if (i > 0 && slices[i - 1].mby_start >= mb_height) {
1055 if (v->field_mode <= 0) {
1056 av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
1057 "picture boundary (%d >= %d)\n", i,
1058 slices[i - 1].mby_start, mb_height);
1059 continue;
1060 }
1061 v->second_field = 1;
1062 av_assert0((s->mb_height & 1) == 0);
1063 v->blocks_off = s->b8_stride * (s->mb_height&~1);
1064 v->mb_off = s->mb_stride * s->mb_height >> 1;
1065 } else {
1066 v->second_field = 0;
1067 v->blocks_off = 0;
1068 v->mb_off = 0;
1069 }
1070 if (i) {
1071 v->pic_header_flag = 0;
1072 if (v->field_mode && i == n_slices1 + 2) {
1073 if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
1074 av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
1075 ret = AVERROR_INVALIDDATA;
1076 if (avctx->err_recognition & AV_EF_EXPLODE)
1077 goto err;
1078 continue;
1079 }
1080 } else if (get_bits1(&s->gb)) {
1081 v->pic_header_flag = 1;
1082 if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
1083 av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
1084 ret = AVERROR_INVALIDDATA;
1085 if (avctx->err_recognition & AV_EF_EXPLODE)
1086 goto err;
1087 continue;
1088 }
1089 }
1090 }
1091 if (header_ret < 0)
1092 continue;
1093 s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
1094 if (!v->field_mode || v->second_field)
1095 s->end_mb_y = (i == n_slices ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
1096 else {
1097 if (i >= n_slices) {
1098 av_log(v->s.avctx, AV_LOG_ERROR, "first field slice count too large\n");
1099 continue;
1100 }
1101 s->end_mb_y = (i == n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
1102 }
1103 if (s->end_mb_y <= s->start_mb_y) {
1104 av_log(v->s.avctx, AV_LOG_ERROR, "end mb y %d %d invalid\n", s->end_mb_y, s->start_mb_y);
1105 continue;
1106 }
1107 if (((s->pict_type == AV_PICTURE_TYPE_P && !v->p_frame_skipped) ||
1108 (s->pict_type == AV_PICTURE_TYPE_B && !v->bi_type)) &&
1109 !v->cbpcy_vlc) {
1110 av_log(v->s.avctx, AV_LOG_ERROR, "missing cbpcy_vlc\n");
1111 continue;
1112 }
1113 ff_vc1_decode_blocks(v);
1114 if (i != n_slices) {
1115 s->gb = slices[i].gb;
1116 }
1117 }
1118 if (v->field_mode) {
1119 v->second_field = 0;
1120 s->current_picture.f->linesize[0] >>= 1;
1121 s->current_picture.f->linesize[1] >>= 1;
1122 s->current_picture.f->linesize[2] >>= 1;
1123 s->linesize >>= 1;
1124 s->uvlinesize >>= 1;
1125 if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
1126 FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
1127 FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
1128 }
1129 }
1130 ff_dlog(s->avctx, "Consumed %i/%i bits\n",
1131 get_bits_count(&s->gb), s->gb.size_in_bits);
1132 // if (get_bits_count(&s->gb) > buf_size * 8)
1133 // return -1;
1134 if(s->er.error_occurred && s->pict_type == AV_PICTURE_TYPE_B) {
1135 ret = AVERROR_INVALIDDATA;
1136 goto err;
1137 }
1138 if (!v->field_mode)
1139 ff_er_frame_end(&s->er);
1140 }
1141
1142 ff_mpv_frame_end(s);
1143
1144 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
1145 image:
1146 avctx->width = avctx->coded_width = v->output_width;
1147 avctx->height = avctx->coded_height = v->output_height;
1148 if (avctx->skip_frame >= AVDISCARD_NONREF)
1149 goto end;
1150 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
1151 if ((ret = vc1_decode_sprites(v, &s->gb)) < 0)
1152 goto err;
1153 #endif
1154 if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
1155 goto err;
1156 *got_frame = 1;
1157 } else {
1158 if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
1159 if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
1160 goto err;
1161 ff_print_debug_info(s, s->current_picture_ptr, pict);
1162 *got_frame = 1;
1163 } else if (s->last_picture_ptr) {
1164 if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
1165 goto err;
1166 ff_print_debug_info(s, s->last_picture_ptr, pict);
1167 *got_frame = 1;
1168 }
1169 }
1170
1171 end:
1172 av_free(buf2);
1173 for (i = 0; i < n_slices; i++)
1174 av_free(slices[i].buf);
1175 av_free(slices);
1176 return buf_size;
1177
1178 err:
1179 av_free(buf2);
1180 for (i = 0; i < n_slices; i++)
1181 av_free(slices[i].buf);
1182 av_free(slices);
1183 return ret;
1184 }
1185
1186
/**
 * Candidate output pixel formats shared by the vc1 and wmv3 decoders.
 * One entry per hardware acceleration backend compiled in, followed by
 * the software AV_PIX_FMT_YUV420P fallback and the mandatory
 * AV_PIX_FMT_NONE terminator. Entry order matters: formats listed
 * earlier are preferred during format negotiation — do not reorder.
 */
static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
#if CONFIG_VC1_DXVA2_HWACCEL
    AV_PIX_FMT_DXVA2_VLD,
#endif
#if CONFIG_VC1_D3D11VA_HWACCEL
    AV_PIX_FMT_D3D11VA_VLD,
    AV_PIX_FMT_D3D11,
#endif
#if CONFIG_VC1_NVDEC_HWACCEL
    AV_PIX_FMT_CUDA,
#endif
#if CONFIG_VC1_VAAPI_HWACCEL
    AV_PIX_FMT_VAAPI,
#endif
#if CONFIG_VC1_VDPAU_HWACCEL
    AV_PIX_FMT_VDPAU,
#endif
    AV_PIX_FMT_YUV420P,   // software decoding fallback
    AV_PIX_FMT_NONE       // list terminator, required by the API
};
1207
/**
 * Decoder registration for SMPTE VC-1 (Advanced Profile) bitstreams.
 *
 * Per-instance state lives in VC1Context. Supports direct rendering
 * (AV_CODEC_CAP_DR1) and delayed output for B-frame reordering
 * (AV_CODEC_CAP_DELAY). Hardware acceleration entries below are
 * compiled in conditionally per backend.
 */
AVCodec ff_vc1_decoder = {
    .name           = "vc1",
    .long_name      = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_VC1,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .flush          = ff_mpeg_flush,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .pix_fmts       = vc1_hwaccel_pixfmt_list_420,
    .hw_configs     = (const AVCodecHWConfigInternal*[]) {
#if CONFIG_VC1_DXVA2_HWACCEL
                        HWACCEL_DXVA2(vc1),
#endif
#if CONFIG_VC1_D3D11VA_HWACCEL
                        HWACCEL_D3D11VA(vc1),
#endif
#if CONFIG_VC1_D3D11VA2_HWACCEL
                        HWACCEL_D3D11VA2(vc1),
#endif
#if CONFIG_VC1_NVDEC_HWACCEL
                        HWACCEL_NVDEC(vc1),
#endif
#if CONFIG_VC1_VAAPI_HWACCEL
                        HWACCEL_VAAPI(vc1),
#endif
#if CONFIG_VC1_VDPAU_HWACCEL
                        HWACCEL_VDPAU(vc1),
#endif
                        NULL  // terminator for the hw_configs array
                    },
    .profiles       = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};
1243
#if CONFIG_WMV3_DECODER
/**
 * Decoder registration for Windows Media Video 9 (WMV3), the
 * Microsoft-specific predecessor profile of VC-1. Shares init/close/
 * decode entry points and the pixel-format list with the vc1 decoder
 * above; only the codec ID and per-backend hwaccel configs differ.
 */
AVCodec ff_wmv3_decoder = {
    .name           = "wmv3",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .flush          = ff_mpeg_flush,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .pix_fmts       = vc1_hwaccel_pixfmt_list_420,
    .hw_configs     = (const AVCodecHWConfigInternal*[]) {
#if CONFIG_WMV3_DXVA2_HWACCEL
                        HWACCEL_DXVA2(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA_HWACCEL
                        HWACCEL_D3D11VA(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA2_HWACCEL
                        HWACCEL_D3D11VA2(wmv3),
#endif
#if CONFIG_WMV3_NVDEC_HWACCEL
                        HWACCEL_NVDEC(wmv3),
#endif
#if CONFIG_WMV3_VAAPI_HWACCEL
                        HWACCEL_VAAPI(wmv3),
#endif
#if CONFIG_WMV3_VDPAU_HWACCEL
                        HWACCEL_VDPAU(wmv3),
#endif
                        NULL  // terminator for the hw_configs array
                    },
    .profiles       = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};
#endif
1281
#if CONFIG_WMV3IMAGE_DECODER
/**
 * Decoder registration for WMV9 Image, the sprite-based still-image
 * variant of WMV3 (output is produced via vc1_decode_sprites()).
 * Unlike the video decoders above it has no AV_CODEC_CAP_DELAY (no
 * B-frame reordering), uses the sprite-aware flush callback, and
 * offers no hardware acceleration — software YUV420P only.
 */
AVCodec ff_wmv3image_decoder = {
    .name           = "wmv3image",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
    .flush          = vc1_sprite_flush,
    .pix_fmts       = (const enum AVPixelFormat[]) {
                        AV_PIX_FMT_YUV420P,
                        AV_PIX_FMT_NONE   // list terminator
                    },
};
#endif
1300
#if CONFIG_VC1IMAGE_DECODER
/**
 * Decoder registration for WMV9 Image v2, the second revision of the
 * sprite-based still-image codec. Identical setup to ff_wmv3image_decoder
 * except for the codec ID: no AV_CODEC_CAP_DELAY, sprite-aware flush,
 * and software YUV420P output only (no hardware acceleration).
 */
AVCodec ff_vc1image_decoder = {
    .name           = "vc1image",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_VC1IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
    .flush          = vc1_sprite_flush,
    .pix_fmts       = (const enum AVPixelFormat[]) {
                        AV_PIX_FMT_YUV420P,
                        AV_PIX_FMT_NONE   // list terminator
                    },
};
#endif
1319