/*
 * MJPEG encoder
 * Copyright (c) 2000, 2001 Fabrice Bellard
 * Copyright (c) 2003 Alex Beregszaszi
 * Copyright (c) 2003-2004 Michael Niedermayer
 *
 * Support for external huffman table, various fixes (AVID workaround),
 * aspecting, new decode_frame mechanism and apple mjpeg-b support
 * by Alex Beregszaszi
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * MJPEG encoder.
 */

#include "config_components.h"

#include "libavutil/pixdesc.h"

#include "avcodec.h"
#include "codec_internal.h"
#include "jpegtables.h"
#include "mjpegenc_common.h"
#include "mjpegenc_huffman.h"
#include "mpegvideo.h"
#include "mjpeg.h"
#include "mjpegenc.h"
#include "mpegvideoenc.h"
#include "profiles.h"

/* The following is the private context of the MJPEG/AMV encoder.
 * Note that when using slice threading only the main thread's
 * MpegEncContext is followed by a MjpegContext; the other threads
 * can access this shared context via MpegEncContext.mjpeg. */
typedef struct MJPEGEncContext {
    MpegEncContext mpeg;
    MJpegContext   mjpeg;
} MJPEGEncContext;

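/* Build a table of code lengths for every (run, level) pair so that the
 * encoder's rate estimation (e.g. trellis quantization) can quickly look up
 * how many bits an AC coefficient costs with the given AC Huffman table:
 * (run >> 4) ZRL escapes, plus the Huffman code for the (run & 15, size)
 * symbol, plus 'size' magnitude bits. For example, run = 18 and level = 3
 * give size = 2, so the cost is
 * huff_size_ac[0xF0] + huff_size_ac[0x22] + 2 bits. */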
static av_cold void init_uni_ac_vlc(const uint8_t huff_size_ac[256],
                                    uint8_t *uni_ac_vlc_len)
{
    for (int i = 0; i < 128; i++) {
        int level = i - 64;
        if (!level)
            continue;
        for (int run = 0; run < 64; run++) {
            int len, code, nbits;
            int alevel = FFABS(level);

            len = (run >> 4) * huff_size_ac[0xf0];

            nbits = av_log2_16bit(alevel) + 1;
            code  = ((15 & run) << 4) | nbits;

            len += huff_size_ac[code] + nbits;

            uni_ac_vlc_len[UNI_AC_ENC_INDEX(run, i)] = len;
            // We ignore EOB, as it is just a constant which generally does not change.
        }
    }
}

static void mjpeg_encode_picture_header(MpegEncContext *s)
{
    ff_mjpeg_encode_picture_header(s->avctx, &s->pb, s->picture->f, s->mjpeg_ctx,
                                   &s->intra_scantable, 0,
                                   s->intra_matrix, s->chroma_intra_matrix,
                                   s->slice_context_count > 1);

    s->esc_pos = put_bytes_count(&s->pb, 0);
    for (int i = 1; i < s->slice_context_count; i++)
        s->thread_context[i]->esc_pos = 0;
}

void ff_mjpeg_amv_encode_picture_header(MpegEncContext *s)
{
    MJPEGEncContext *const m = (MJPEGEncContext*)s;
    av_assert2(s->mjpeg_ctx == &m->mjpeg);
    /* s->huffman == HUFFMAN_TABLE_OPTIMAL can only be true for MJPEG. */
    if (!CONFIG_MJPEG_ENCODER || m->mjpeg.huffman != HUFFMAN_TABLE_OPTIMAL)
        mjpeg_encode_picture_header(s);
}

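/* When optimal Huffman tables are enabled, the per-coefficient symbols are
 * not written to the bitstream during macroblock encoding; instead they are
 * recorded into MJpegContext.huff_buffer (see record_block() below). The
 * tables are then derived from the recorded statistics and the whole scan is
 * emitted in one pass by the functions in this block. */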
#if CONFIG_MJPEG_ENCODER
/**
 * Encodes and outputs the entire frame in the JPEG format.
 *
 * @param s The MpegEncContext.
 */
static void mjpeg_encode_picture_frame(MpegEncContext *s)
{
    int nbits, code, table_id;
    MJpegContext *m = s->mjpeg_ctx;
    uint8_t *huff_size[4] = { m->huff_size_dc_luminance,
                              m->huff_size_dc_chrominance,
                              m->huff_size_ac_luminance,
                              m->huff_size_ac_chrominance };
    uint16_t *huff_code[4] = { m->huff_code_dc_luminance,
                               m->huff_code_dc_chrominance,
                               m->huff_code_ac_luminance,
                               m->huff_code_ac_chrominance };
    size_t total_bits = 0;
    size_t bytes_needed;

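    /* Each recorded entry holds a table index, a symbol whose high nibble is
     * the zero run and whose low nibble is the number of magnitude bits, and
     * the signed mantissa carrying those bits (see ff_mjpeg_encode_coef()). */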
    s->header_bits = get_bits_diff(s);
    // Estimate the total size first
    for (int i = 0; i < m->huff_ncode; i++) {
        table_id = m->huff_buffer[i].table_id;
        code = m->huff_buffer[i].code;
        nbits = code & 0xf;

        total_bits += huff_size[table_id][code] + nbits;
    }

    bytes_needed = (total_bits + 7) / 8;
    ff_mpv_reallocate_putbitbuffer(s, bytes_needed, bytes_needed);

    for (int i = 0; i < m->huff_ncode; i++) {
        table_id = m->huff_buffer[i].table_id;
        code = m->huff_buffer[i].code;
        nbits = code & 0xf;

        put_bits(&s->pb, huff_size[table_id][code], huff_code[table_id][code]);
        if (nbits != 0) {
            put_sbits(&s->pb, nbits, m->huff_buffer[i].mant);
        }
    }

    m->huff_ncode = 0;
    s->i_tex_bits = get_bits_diff(s);
}

/**
 * Builds all 4 optimal Huffman tables.
 *
 * Uses the data stored in the JPEG buffer to compute the tables.
 * Stores the Huffman tables in the bits_* and val_* arrays in the MJpegContext.
 *
 * @param m MJpegContext containing the JPEG buffer.
 */
static void mjpeg_build_optimal_huffman(MJpegContext *m)
{
    MJpegEncHuffmanContext dc_luminance_ctx;
    MJpegEncHuffmanContext dc_chrominance_ctx;
    MJpegEncHuffmanContext ac_luminance_ctx;
    MJpegEncHuffmanContext ac_chrominance_ctx;
    MJpegEncHuffmanContext *ctx[4] = { &dc_luminance_ctx,
                                       &dc_chrominance_ctx,
                                       &ac_luminance_ctx,
                                       &ac_chrominance_ctx };
    for (int i = 0; i < 4; i++)
        ff_mjpeg_encode_huffman_init(ctx[i]);

    for (int i = 0; i < m->huff_ncode; i++) {
        int table_id = m->huff_buffer[i].table_id;
        int code = m->huff_buffer[i].code;

        ff_mjpeg_encode_huffman_increment(ctx[table_id], code);
    }

    ff_mjpeg_encode_huffman_close(&dc_luminance_ctx,
                                  m->bits_dc_luminance,
                                  m->val_dc_luminance, 12);
    ff_mjpeg_encode_huffman_close(&dc_chrominance_ctx,
                                  m->bits_dc_chrominance,
                                  m->val_dc_chrominance, 12);
    ff_mjpeg_encode_huffman_close(&ac_luminance_ctx,
                                  m->bits_ac_luminance,
                                  m->val_ac_luminance, 256);
    ff_mjpeg_encode_huffman_close(&ac_chrominance_ctx,
                                  m->bits_ac_chrominance,
                                  m->val_ac_chrominance, 256);

    ff_mjpeg_build_huffman_codes(m->huff_size_dc_luminance,
                                 m->huff_code_dc_luminance,
                                 m->bits_dc_luminance,
                                 m->val_dc_luminance);
    ff_mjpeg_build_huffman_codes(m->huff_size_dc_chrominance,
                                 m->huff_code_dc_chrominance,
                                 m->bits_dc_chrominance,
                                 m->val_dc_chrominance);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_luminance,
                                 m->huff_code_ac_luminance,
                                 m->bits_ac_luminance,
                                 m->val_ac_luminance);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_chrominance,
                                 m->huff_code_ac_chrominance,
                                 m->bits_ac_chrominance,
                                 m->val_ac_chrominance);
}
#endif

/**
 * Writes the complete JPEG frame when optimal Huffman tables are enabled,
 * otherwise writes the stuffing.
 *
 * Header + values + stuffing.
 *
 * @param s The MpegEncContext.
 * @return int Error code, 0 if successful.
 */
int ff_mjpeg_encode_stuffing(MpegEncContext *s)
{
    MJpegContext *const m = s->mjpeg_ctx;
    PutBitContext *pbc = &s->pb;
    int mb_y = s->mb_y - !s->mb_x;
    int ret;

#if CONFIG_MJPEG_ENCODER
    if (m->huffman == HUFFMAN_TABLE_OPTIMAL) {

        mjpeg_build_optimal_huffman(m);

        // Replace the VLCs with the optimal ones.
        // The default ones may be used for trellis during quantization.
        init_uni_ac_vlc(m->huff_size_ac_luminance, m->uni_ac_vlc_len);
        init_uni_ac_vlc(m->huff_size_ac_chrominance, m->uni_chroma_ac_vlc_len);
        s->intra_ac_vlc_length =
        s->intra_ac_vlc_last_length = m->uni_ac_vlc_len;
        s->intra_chroma_ac_vlc_length =
        s->intra_chroma_ac_vlc_last_length = m->uni_chroma_ac_vlc_len;

        mjpeg_encode_picture_header(s);
        mjpeg_encode_picture_frame(s);
    }
#endif

    ret = ff_mpv_reallocate_putbitbuffer(s, put_bits_count(&s->pb) / 8 + 100,
                                            put_bits_count(&s->pb) / 4 + 1000);
    if (ret < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "Buffer reallocation failed\n");
        goto fail;
    }

    ff_mjpeg_escape_FF(pbc, s->esc_pos);

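    /* With slice threading, every slice except the last one ends with a
     * restart marker; the marker index cycles through RST0..RST7 as JPEG
     * requires. */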
    if (s->slice_context_count > 1 && mb_y < s->mb_height - 1)
        put_marker(pbc, RST0 + (mb_y&7));
    s->esc_pos = put_bytes_count(pbc, 0);

fail:
    for (int i = 0; i < 3; i++)
        s->last_dc[i] = 128 << s->intra_dc_precision;

    return ret;
}

static int alloc_huffman(MpegEncContext *s)
{
    MJpegContext *m = s->mjpeg_ctx;
    size_t num_mbs, num_blocks, num_codes;
    int blocks_per_mb;

    // We need to init this here as the mjpeg init is called before the common init.
    s->mb_width  = (s->width  + 15) / 16;
    s->mb_height = (s->height + 15) / 16;

    switch (s->chroma_format) {
    case CHROMA_420: blocks_per_mb =  6; break;
    case CHROMA_422: blocks_per_mb =  8; break;
    case CHROMA_444: blocks_per_mb = 12; break;
    default: av_assert0(0);
    }

    // Make sure we have enough space to hold this frame:
    // in the worst case, one code per coefficient.
    num_mbs    = s->mb_width * s->mb_height;
    num_blocks = num_mbs * blocks_per_mb;
    num_codes  = num_blocks * 64;

    m->huff_buffer = av_malloc_array(num_codes, sizeof(MJpegHuffmanCode));
    if (!m->huff_buffer)
        return AVERROR(ENOMEM);
    return 0;
}

av_cold int ff_mjpeg_encode_init(MpegEncContext *s)
{
    MJpegContext *const m = &((MJPEGEncContext*)s)->mjpeg;
    int ret, use_slices;

    s->mjpeg_ctx = m;
    use_slices = s->avctx->slices > 0 ? s->avctx->slices > 1 :
                 (s->avctx->active_thread_type & FF_THREAD_SLICE) &&
                 s->avctx->thread_count > 1;

    if (s->codec_id == AV_CODEC_ID_AMV || use_slices)
        m->huffman = HUFFMAN_TABLE_DEFAULT;

    if (s->mpv_flags & FF_MPV_FLAG_QP_RD) {
        // Used to produce garbage with MJPEG.
        av_log(s->avctx, AV_LOG_ERROR,
               "QP RD is no longer compatible with MJPEG or AMV\n");
        return AVERROR(EINVAL);
    }

    /* The following check is automatically true for AMV,
     * but it doesn't hurt either. */
    ret = ff_mjpeg_encode_check_pix_fmt(s->avctx);
    if (ret < 0)
        return ret;

    if (s->width > 65500 || s->height > 65500) {
        av_log(s, AV_LOG_ERROR, "JPEG does not support resolutions above 65500x65500\n");
        return AVERROR(EINVAL);
    }

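    /* Baseline JPEG codes AC coefficient magnitudes with at most 10 bits,
     * so quantized coefficients are clamped to the +/-1023 range. */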
    s->min_qcoeff = -1023;
    s->max_qcoeff =  1023;

    // Build default Huffman tables.
    // These may be overwritten later with more optimal Huffman tables, but
    // they are needed at least right now for some processes like trellis.
    ff_mjpeg_build_huffman_codes(m->huff_size_dc_luminance,
                                 m->huff_code_dc_luminance,
                                 ff_mjpeg_bits_dc_luminance,
                                 ff_mjpeg_val_dc);
    ff_mjpeg_build_huffman_codes(m->huff_size_dc_chrominance,
                                 m->huff_code_dc_chrominance,
                                 ff_mjpeg_bits_dc_chrominance,
                                 ff_mjpeg_val_dc);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_luminance,
                                 m->huff_code_ac_luminance,
                                 ff_mjpeg_bits_ac_luminance,
                                 ff_mjpeg_val_ac_luminance);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_chrominance,
                                 m->huff_code_ac_chrominance,
                                 ff_mjpeg_bits_ac_chrominance,
                                 ff_mjpeg_val_ac_chrominance);

    init_uni_ac_vlc(m->huff_size_ac_luminance, m->uni_ac_vlc_len);
    init_uni_ac_vlc(m->huff_size_ac_chrominance, m->uni_chroma_ac_vlc_len);
    s->intra_ac_vlc_length =
    s->intra_ac_vlc_last_length = m->uni_ac_vlc_len;
    s->intra_chroma_ac_vlc_length =
    s->intra_chroma_ac_vlc_last_length = m->uni_chroma_ac_vlc_len;

    // Buffers start out empty.
    m->huff_ncode = 0;

    if (m->huffman == HUFFMAN_TABLE_OPTIMAL)
        return alloc_huffman(s);

    return 0;
}

static av_cold int mjpeg_encode_close(AVCodecContext *avctx)
{
    MJPEGEncContext *const mjpeg = avctx->priv_data;
    av_freep(&mjpeg->mjpeg.huff_buffer);
    ff_mpv_encode_end(avctx);
    return 0;
}

/**
 * Add code and table_id to the JPEG buffer.
 *
 * @param s The MJpegContext which contains the JPEG buffer.
 * @param table_id Which Huffman table the code belongs to.
 * @param code The encoded symbol: the zero run in the high nibble and the
 *             number of magnitude bits (size category) in the low nibble.
 */
static inline void ff_mjpeg_encode_code(MJpegContext *s, uint8_t table_id, int code)
{
    MJpegHuffmanCode *c = &s->huff_buffer[s->huff_ncode++];
    c->table_id = table_id;
    c->code = code;
}

/**
 * Add the coefficient's data to the JPEG buffer.
 *
 * @param s The MJpegContext which contains the JPEG buffer.
 * @param table_id Which Huffman table the code belongs to.
 * @param val The coefficient value.
 * @param run The length of the preceding zero run.
 */
static void ff_mjpeg_encode_coef(MJpegContext *s, uint8_t table_id, int val, int run)
{
    int mant, code;

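    /* JPEG codes a non-zero value as a size category (the number of bits in
     * its magnitude) followed by that many raw bits; negative values are sent
     * as val - 1 so the sign can be inferred from the leading bit. For
     * example, val = -3 has size 2 and is recorded as mant = -4, whose low
     * two bits "00" are what put_sbits() will emit later. */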
    if (val == 0) {
        av_assert0(run == 0);
        ff_mjpeg_encode_code(s, table_id, 0);
    } else {
        mant = val;
        if (val < 0) {
            val = -val;
            mant--;
        }

        code = (run << 4) | (av_log2_16bit(val) + 1);

        s->huff_buffer[s->huff_ncode].mant = mant;
        ff_mjpeg_encode_code(s, table_id, code);
    }
}

/**
 * Add the block's data into the JPEG buffer.
 *
 * @param s The MpegEncContext that contains the JPEG buffer.
 * @param block The block.
 * @param n The block's index or number.
 */
static void record_block(MpegEncContext *s, int16_t *block, int n)
{
    int i, j, table_id;
    int component, dc, last_index, val, run;
    MJpegContext *m = s->mjpeg_ctx;

    /* DC coef */
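    /* Blocks 0..3 are luma; for n >= 4, even-numbered blocks are Cb and
     * odd-numbered ones are Cr (see ff_mjpeg_encode_mb()). table_id 0/1
     * selects the luma/chroma DC table; OR-ing in 2 below switches to the
     * corresponding AC table. */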
    component = (n <= 3 ? 0 : (n&1) + 1);
    table_id = (n <= 3 ? 0 : 1);
    dc = block[0]; /* overflow is impossible */
    val = dc - s->last_dc[component];

    ff_mjpeg_encode_coef(m, table_id, val, 0);

    s->last_dc[component] = dc;

    /* AC coefs */

    run = 0;
    last_index = s->block_last_index[n];
    table_id |= 2;

    for (i = 1; i <= last_index; i++) {
        j = s->intra_scantable.permutated[i];
        val = block[j];

        if (val == 0) {
            run++;
        } else {
            while (run >= 16) {
                ff_mjpeg_encode_code(m, table_id, 0xf0);
                run -= 16;
            }
            ff_mjpeg_encode_coef(m, table_id, val, run);
            run = 0;
        }
    }

    /* output EOB only if not already 64 values */
    if (last_index < 63 || run != 0)
        ff_mjpeg_encode_code(m, table_id, 0);
}

static void encode_block(MpegEncContext *s, int16_t *block, int n)
{
    int mant, nbits, code, i, j;
    int component, dc, run, last_index, val;
    MJpegContext *m = s->mjpeg_ctx;
    uint8_t *huff_size_ac;
    uint16_t *huff_code_ac;

    /* DC coef */
    component = (n <= 3 ? 0 : (n&1) + 1);
    dc = block[0]; /* overflow is impossible */
    val = dc - s->last_dc[component];
    if (n < 4) {
        ff_mjpeg_encode_dc(&s->pb, val, m->huff_size_dc_luminance, m->huff_code_dc_luminance);
        huff_size_ac = m->huff_size_ac_luminance;
        huff_code_ac = m->huff_code_ac_luminance;
    } else {
        ff_mjpeg_encode_dc(&s->pb, val, m->huff_size_dc_chrominance, m->huff_code_dc_chrominance);
        huff_size_ac = m->huff_size_ac_chrominance;
        huff_code_ac = m->huff_code_ac_chrominance;
    }
    s->last_dc[component] = dc;

    /* AC coefs */

    run = 0;
    last_index = s->block_last_index[n];
    for (i = 1; i <= last_index; i++) {
        j = s->intra_scantable.permutated[i];
        val = block[j];
        if (val == 0) {
            run++;
        } else {
            while (run >= 16) {
                put_bits(&s->pb, huff_size_ac[0xf0], huff_code_ac[0xf0]);
                run -= 16;
            }
            mant = val;
            if (val < 0) {
                val = -val;
                mant--;
            }

            nbits = av_log2_16bit(val) + 1;
            code = (run << 4) | nbits;

            put_bits(&s->pb, huff_size_ac[code], huff_code_ac[code]);

            put_sbits(&s->pb, nbits, mant);
            run = 0;
        }
    }

    /* output EOB only if not already 64 values */
    if (last_index < 63 || run != 0)
        put_bits(&s->pb, huff_size_ac[0], huff_code_ac[0]);
}

void ff_mjpeg_encode_mb(MpegEncContext *s, int16_t block[12][64])
{
    int i;
    if (s->mjpeg_ctx->huffman == HUFFMAN_TABLE_OPTIMAL) {
        if (s->chroma_format == CHROMA_444) {
            record_block(s, block[0], 0);
            record_block(s, block[2], 2);
            record_block(s, block[4], 4);
            record_block(s, block[8], 8);
            record_block(s, block[5], 5);
            record_block(s, block[9], 9);

            if (16*s->mb_x+8 < s->width) {
                record_block(s, block[1], 1);
                record_block(s, block[3], 3);
                record_block(s, block[6], 6);
                record_block(s, block[10], 10);
                record_block(s, block[7], 7);
                record_block(s, block[11], 11);
            }
        } else {
            for (i = 0; i < 5; i++) {
                record_block(s, block[i], i);
            }
            if (s->chroma_format == CHROMA_420) {
                record_block(s, block[5], 5);
            } else {
                record_block(s, block[6], 6);
                record_block(s, block[5], 5);
                record_block(s, block[7], 7);
            }
        }
    } else {
        if (s->chroma_format == CHROMA_444) {
            encode_block(s, block[0], 0);
            encode_block(s, block[2], 2);
            encode_block(s, block[4], 4);
            encode_block(s, block[8], 8);
            encode_block(s, block[5], 5);
            encode_block(s, block[9], 9);

            if (16*s->mb_x+8 < s->width) {
                encode_block(s, block[1], 1);
                encode_block(s, block[3], 3);
                encode_block(s, block[6], 6);
                encode_block(s, block[10], 10);
                encode_block(s, block[7], 7);
                encode_block(s, block[11], 11);
            }
        } else {
            for (i = 0; i < 5; i++) {
                encode_block(s, block[i], i);
            }
            if (s->chroma_format == CHROMA_420) {
                encode_block(s, block[5], 5);
            } else {
                encode_block(s, block[6], 6);
                encode_block(s, block[5], 5);
                encode_block(s, block[7], 7);
            }
        }

        s->i_tex_bits += get_bits_diff(s);
    }
}

#if CONFIG_AMV_ENCODER
// maximum over s->mjpeg_vsample[i]
#define V_MAX 2
static int amv_encode_picture(AVCodecContext *avctx, AVPacket *pkt,
                              const AVFrame *pic_arg, int *got_packet)
{
    MpegEncContext *s = avctx->priv_data;
    AVFrame *pic;
    int i, ret;
    int chroma_h_shift, chroma_v_shift;

    av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift, &chroma_v_shift);

    if ((avctx->height & 15) && avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
        av_log(avctx, AV_LOG_ERROR,
               "Heights which are not a multiple of 16 might fail with some decoders, "
               "use vstrict=-1 / -strict -1 to use %d anyway.\n", avctx->height);
        av_log(avctx, AV_LOG_WARNING, "If you have a device that plays AMV videos, please test if videos "
               "with such heights work with it and report your findings to ffmpeg-devel@ffmpeg.org\n");
        return AVERROR_EXPERIMENTAL;
    }

    pic = av_frame_clone(pic_arg);
    if (!pic)
        return AVERROR(ENOMEM);
    // picture should be flipped upside-down
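    /* Pointing data[] at the last row of each plane and negating the
     * linesize makes the encoder read the planes bottom-up, which flips
     * the image as AMV requires. */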
    for (i = 0; i < 3; i++) {
        int vsample = i ? 2 >> chroma_v_shift : 2;
        pic->data[i] += pic->linesize[i] * (vsample * s->height / V_MAX - 1);
        pic->linesize[i] *= -1;
    }
    ret = ff_mpv_encode_picture(avctx, pkt, pic, got_packet);
    av_frame_free(&pic);
    return ret;
}
#endif


#define OFFSET(x) offsetof(MJPEGEncContext, mjpeg.x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    FF_MPV_COMMON_OPTS
    { "huffman", "Huffman table strategy", OFFSET(huffman), AV_OPT_TYPE_INT, { .i64 = HUFFMAN_TABLE_OPTIMAL }, 0, NB_HUFFMAN_TABLE_OPTION - 1, VE, "huffman" },
    { "default", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = HUFFMAN_TABLE_DEFAULT }, INT_MIN, INT_MAX, VE, "huffman" },
    { "optimal", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = HUFFMAN_TABLE_OPTIMAL }, INT_MIN, INT_MAX, VE, "huffman" },
    { "force_duplicated_matrix", "Always write luma and chroma matrix for mjpeg, useful for rtp streaming.", OFFSET(force_duplicated_matrix), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE },
    { NULL },
};

#if CONFIG_MJPEG_ENCODER
static const AVClass mjpeg_class = {
    .class_name = "mjpeg encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

const FFCodec ff_mjpeg_encoder = {
    .p.name         = "mjpeg",
    .p.long_name    = NULL_IF_CONFIG_SMALL("MJPEG (Motion JPEG)"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_MJPEG,
    .priv_data_size = sizeof(MJPEGEncContext),
    .init           = ff_mpv_encode_init,
    FF_CODEC_ENCODE_CB(ff_mpv_encode_picture),
    .close          = mjpeg_encode_close,
    .p.capabilities = AV_CODEC_CAP_SLICE_THREADS | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
    .p.pix_fmts     = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P,
        AV_PIX_FMT_YUV420P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_NONE
    },
    .p.priv_class   = &mjpeg_class,
    .p.profiles     = NULL_IF_CONFIG_SMALL(ff_mjpeg_profiles),
};
#endif

#if CONFIG_AMV_ENCODER
static const AVClass amv_class = {
    .class_name = "amv encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

const FFCodec ff_amv_encoder = {
    .p.name         = "amv",
    .p.long_name    = NULL_IF_CONFIG_SMALL("AMV Video"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_AMV,
    .priv_data_size = sizeof(MJPEGEncContext),
    .init           = ff_mpv_encode_init,
    FF_CODEC_ENCODE_CB(amv_encode_picture),
    .close          = mjpeg_encode_close,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
    .p.pix_fmts     = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_NONE
    },
    .p.priv_class   = &amv_class,
};
#endif