1 /*
2 * PNG image format
3 * Copyright (c) 2003 Fabrice Bellard
4 *
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * FFmpeg is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with FFmpeg; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include "avcodec.h"
23 #include "codec_internal.h"
24 #include "encode.h"
25 #include "bytestream.h"
26 #include "lossless_videoencdsp.h"
27 #include "png.h"
28 #include "apng.h"
29 #include "zlib_wrapper.h"
30
31 #include "libavutil/avassert.h"
32 #include "libavutil/crc.h"
33 #include "libavutil/libm.h"
34 #include "libavutil/opt.h"
35 #include "libavutil/color_utils.h"
36 #include "libavutil/stereo3d.h"
37
38 #include <zlib.h>
39
40 #define IOBUF_SIZE 4096
41
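/* Frame parameters carried by an APNG fcTL (frame control) chunk,
 * in the order they are written to the bitstream. */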
42 typedef struct APNGFctlChunk {
43 uint32_t sequence_number;
44 uint32_t width, height;
45 uint32_t x_offset, y_offset;
46 uint16_t delay_num, delay_den;
47 uint8_t dispose_op, blend_op;
48 } APNGFctlChunk;
49
50 typedef struct PNGEncContext {
51 AVClass *class;
52 LLVidEncDSPContext llvidencdsp;
53
54 uint8_t *bytestream;
55 uint8_t *bytestream_start;
56 uint8_t *bytestream_end;
57
58 int filter_type;
59
60 FFZStream zstream;
61 uint8_t buf[IOBUF_SIZE];
62 int dpi; ///< Physical pixel density, in dots per inch, if set
63 int dpm; ///< Physical pixel density, in dots per meter, if set
64
65 int is_progressive;
66 int bit_depth;
67 int color_type;
68 int bits_per_pixel;
69
70 // APNG
71 uint32_t palette_checksum; // Used to ensure a single unique palette
72 uint32_t sequence_number;
73 int extra_data_updated;
74 uint8_t *extra_data;
75 int extra_data_size;
76
77 AVFrame *prev_frame;
78 AVFrame *last_frame;
79 APNGFctlChunk last_frame_fctl;
80 uint8_t *last_frame_packet;
81 size_t last_frame_packet_size;
82 } PNGEncContext;
83
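/* Copy the pixels belonging to one Adam7 interlacing pass from a full
 * source row into a packed destination row; pixels outside the pass are
 * skipped. */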
84 static void png_get_interlaced_row(uint8_t *dst, int row_size,
85 int bits_per_pixel, int pass,
86 const uint8_t *src, int width)
87 {
88 int x, mask, dst_x, j, b, bpp;
89 uint8_t *d;
90 const uint8_t *s;
91 static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};
92
93 mask = masks[pass];
94 switch (bits_per_pixel) {
95 case 1:
96 memset(dst, 0, row_size);
97 dst_x = 0;
98 for (x = 0; x < width; x++) {
99 j = (x & 7);
100 if ((mask << j) & 0x80) {
101 b = (src[x >> 3] >> (7 - j)) & 1;
102 dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
103 dst_x++;
104 }
105 }
106 break;
107 default:
108 bpp = bits_per_pixel >> 3;
109 d = dst;
110 s = src;
111 for (x = 0; x < width; x++) {
112 j = x & 7;
113 if ((mask << j) & 0x80) {
114 memcpy(d, s, bpp);
115 d += bpp;
116 }
117 s += bpp;
118 }
119 break;
120 }
121 }
122
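/* PNG Paeth filter: predict each byte from its left, top and top-left
 * neighbours and store the difference between source and prediction. */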
123 static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
124 int w, int bpp)
125 {
126 int i;
127 for (i = 0; i < w; i++) {
128 int a, b, c, p, pa, pb, pc;
129
130 a = src[i - bpp];
131 b = top[i];
132 c = top[i - bpp];
133
134 p = b - c;
135 pc = a - c;
136
137 pa = abs(p);
138 pb = abs(pc);
139 pc = abs(p + pc);
140
141 if (pa <= pb && pa <= pc)
142 p = a;
143 else if (pb <= pc)
144 p = b;
145 else
146 p = c;
147 dst[i] = src[i] - p;
148 }
149 }
150
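/* PNG Sub filter: subtract the previous pixel from each pixel, copying the
 * first pixel verbatim and using the LLVidEncDSP diff_bytes() for the bulk
 * of the row. */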
151 static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
152 {
153 const uint8_t *src1 = src + bpp;
154 const uint8_t *src2 = src;
155 int x, unaligned_w;
156
157 memcpy(dst, src, bpp);
158 dst += bpp;
159 size -= bpp;
160 unaligned_w = FFMIN(32 - bpp, size);
161 for (x = 0; x < unaligned_w; x++)
162 *dst++ = *src1++ - *src2++;
163 size -= unaligned_w;
164 c->llvidencdsp.diff_bytes(dst, src1, src2, size);
165 }
166
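/* Apply the given PNG row filter to src (size bytes, bpp bytes per pixel)
 * and write the filtered bytes to dst; top is the previous unfiltered row. */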
167 static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
168 uint8_t *src, uint8_t *top, int size, int bpp)
169 {
170 int i;
171
172 switch (filter_type) {
173 case PNG_FILTER_VALUE_NONE:
174 memcpy(dst, src, size);
175 break;
176 case PNG_FILTER_VALUE_SUB:
177 sub_left_prediction(c, dst, src, bpp, size);
178 break;
179 case PNG_FILTER_VALUE_UP:
180 c->llvidencdsp.diff_bytes(dst, src, top, size);
181 break;
182 case PNG_FILTER_VALUE_AVG:
183 for (i = 0; i < bpp; i++)
184 dst[i] = src[i] - (top[i] >> 1);
185 for (; i < size; i++)
186 dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
187 break;
188 case PNG_FILTER_VALUE_PAETH:
189 for (i = 0; i < bpp; i++)
190 dst[i] = src[i] - top[i];
191 sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
192 break;
193 }
194 }
195
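/* Filter one row. With PNG_FILTER_VALUE_MIXED every filter is tried and the
 * one with the smallest sum of absolute filtered values is kept; the returned
 * buffer starts with the filter-type byte. */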
196 static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
197 uint8_t *src, uint8_t *top, int size, int bpp)
198 {
199 int pred = s->filter_type;
200 av_assert0(bpp || !pred);
201 if (!top && pred)
202 pred = PNG_FILTER_VALUE_SUB;
203 if (pred == PNG_FILTER_VALUE_MIXED) {
204 int i;
205 int cost, bcost = INT_MAX;
206 uint8_t *buf1 = dst, *buf2 = dst + size + 16;
207 for (pred = 0; pred < 5; pred++) {
208 png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
209 buf1[0] = pred;
210 cost = 0;
211 for (i = 0; i <= size; i++)
212 cost += abs((int8_t) buf1[i]);
213 if (cost < bcost) {
214 bcost = cost;
215 FFSWAP(uint8_t *, buf1, buf2);
216 }
217 }
218 return buf2;
219 } else {
220 png_filter_row(s, dst + 1, pred, src, top, size, bpp);
221 dst[0] = pred;
222 return dst;
223 }
224 }
225
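/* Write one PNG chunk: 32-bit big-endian length, 4-byte tag, payload and a
 * CRC-32 computed over tag and payload. */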
226 static void png_write_chunk(uint8_t **f, uint32_t tag,
227 const uint8_t *buf, int length)
228 {
229 const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
230 uint32_t crc = ~0U;
231 uint8_t tagbuf[4];
232
233 bytestream_put_be32(f, length);
234 AV_WL32(tagbuf, tag);
235 crc = av_crc(crc_table, crc, tagbuf, 4);
236 bytestream_put_be32(f, av_bswap32(tag));
237 if (length > 0) {
238 crc = av_crc(crc_table, crc, buf, length);
239 if (*f != buf)
240 memcpy(*f, buf, length);
241 *f += length;
242 }
243 bytestream_put_be32(f, ~crc);
244 }
245
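/* Write one chunk of compressed image data: an IDAT chunk for PNG (and for
 * the first APNG frame), otherwise an fdAT chunk carrying the running
 * sequence number. */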
246 static void png_write_image_data(AVCodecContext *avctx,
247 const uint8_t *buf, int length)
248 {
249 PNGEncContext *s = avctx->priv_data;
250 const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
251 uint32_t crc = ~0U;
252
253 if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_number == 0) {
254 png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
255 return;
256 }
257
258 bytestream_put_be32(&s->bytestream, length + 4);
259
260 bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
261 bytestream_put_be32(&s->bytestream, s->sequence_number);
262 crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
263
264 crc = av_crc(crc_table, crc, buf, length);
265 memcpy(s->bytestream, buf, length);
266 s->bytestream += length;
267
268 bytestream_put_be32(&s->bytestream, ~crc);
269
270 ++s->sequence_number;
271 }
272
273 /* XXX: do filtering */
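/* Deflate one already-filtered row; whenever the output buffer fills up,
 * flush it as an IDAT/fdAT chunk. */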
274 static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
275 {
276 PNGEncContext *s = avctx->priv_data;
277 z_stream *const zstream = &s->zstream.zstream;
278 int ret;
279
280 zstream->avail_in = size;
281 zstream->next_in = data;
282 while (zstream->avail_in > 0) {
283 ret = deflate(zstream, Z_NO_FLUSH);
284 if (ret != Z_OK)
285 return -1;
286 if (zstream->avail_out == 0) {
287 if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
288 png_write_image_data(avctx, s->buf, IOBUF_SIZE);
289 zstream->avail_out = IOBUF_SIZE;
290 zstream->next_out = s->buf;
291 }
292 }
293 return 0;
294 }
295
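/* cHRM and gAMA values are stored as 32-bit integers scaled by 100000. */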
296 #define AV_WB32_PNG(buf, n) AV_WB32(buf, lrint((n) * 100000))
297 static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
298 {
299 double rx, ry, gx, gy, bx, by, wx = 0.3127, wy = 0.3290;
300 switch (prim) {
301 case AVCOL_PRI_BT709:
302 rx = 0.640; ry = 0.330;
303 gx = 0.300; gy = 0.600;
304 bx = 0.150; by = 0.060;
305 break;
306 case AVCOL_PRI_BT470M:
307 rx = 0.670; ry = 0.330;
308 gx = 0.210; gy = 0.710;
309 bx = 0.140; by = 0.080;
310 wx = 0.310; wy = 0.316;
311 break;
312 case AVCOL_PRI_BT470BG:
313 rx = 0.640; ry = 0.330;
314 gx = 0.290; gy = 0.600;
315 bx = 0.150; by = 0.060;
316 break;
317 case AVCOL_PRI_SMPTE170M:
318 case AVCOL_PRI_SMPTE240M:
319 rx = 0.630; ry = 0.340;
320 gx = 0.310; gy = 0.595;
321 bx = 0.155; by = 0.070;
322 break;
323 case AVCOL_PRI_BT2020:
324 rx = 0.708; ry = 0.292;
325 gx = 0.170; gy = 0.797;
326 bx = 0.131; by = 0.046;
327 break;
328 default:
329 return 0;
330 }
331
332 AV_WB32_PNG(buf , wx); AV_WB32_PNG(buf + 4 , wy);
333 AV_WB32_PNG(buf + 8 , rx); AV_WB32_PNG(buf + 12, ry);
334 AV_WB32_PNG(buf + 16, gx); AV_WB32_PNG(buf + 20, gy);
335 AV_WB32_PNG(buf + 24, bx); AV_WB32_PNG(buf + 28, by);
336 return 1;
337 }
338
339 static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
340 {
341 double gamma = avpriv_get_gamma_from_trc(trc);
342 if (gamma <= 1e-6)
343 return 0;
344
345 AV_WB32_PNG(buf, 1.0 / gamma);
346 return 1;
347 }
348
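/* Write an iCCP chunk from ICC profile side data: profile name, compression
 * method 0 and the deflate-compressed profile bytes. */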
349 static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
350 {
351 z_stream *const zstream = &s->zstream.zstream;
352 const AVDictionaryEntry *entry;
353 const char *name;
354 uint8_t *start, *buf;
355 int ret;
356
357 if (!sd || !sd->size)
358 return 0;
359 zstream->next_in = sd->data;
360 zstream->avail_in = sd->size;
361
362 /* write the chunk contents first */
363 start = s->bytestream + 8; /* make room for iCCP tag + length */
364 buf = start;
365
366 /* profile description */
367 entry = av_dict_get(sd->metadata, "name", NULL, 0);
368 name = (entry && entry->value[0]) ? entry->value : "icc";
369 for (int i = 0;; i++) {
370 char c = (i == 79) ? 0 : name[i];
371 bytestream_put_byte(&buf, c);
372 if (!c)
373 break;
374 }
375
376 /* compression method and profile data */
377 bytestream_put_byte(&buf, 0);
378 zstream->next_out = buf;
379 zstream->avail_out = s->bytestream_end - buf;
380 ret = deflate(zstream, Z_FINISH);
381 deflateReset(zstream);
382 if (ret != Z_STREAM_END)
383 return AVERROR_EXTERNAL;
384
385 /* rewind to the start and write the chunk header/crc */
386 png_write_chunk(&s->bytestream, MKTAG('i', 'C', 'C', 'P'), start,
387 zstream->next_out - start);
388 return 0;
389 }
390
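/* Write IHDR, pHYs and the optional sTER, sRGB, cHRM, gAMA, iCCP and
 * PLTE/tRNS chunks derived from the frame and the codec context. */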
391 static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
392 {
393 AVFrameSideData *side_data;
394 PNGEncContext *s = avctx->priv_data;
395 int ret;
396
397 /* write png header */
398 AV_WB32(s->buf, avctx->width);
399 AV_WB32(s->buf + 4, avctx->height);
400 s->buf[8] = s->bit_depth;
401 s->buf[9] = s->color_type;
402 s->buf[10] = 0; /* compression type */
403 s->buf[11] = 0; /* filter type */
404 s->buf[12] = s->is_progressive; /* interlace type */
405 png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);
406
407 /* write physical information */
408 if (s->dpm) {
409 AV_WB32(s->buf, s->dpm);
410 AV_WB32(s->buf + 4, s->dpm);
411 s->buf[8] = 1; /* unit specifier is meter */
412 } else {
413 AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
414 AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
415 s->buf[8] = 0; /* unit specifier is unknown */
416 }
417 png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);
418
419 /* write stereoscopic information */
420 side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_STEREO3D);
421 if (side_data) {
422 AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
423 switch (stereo3d->type) {
424 case AV_STEREO3D_SIDEBYSIDE:
425 s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
426 png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
427 break;
428 case AV_STEREO3D_2D:
429 break;
430 default:
431 av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
432 break;
433 }
434 }
435
436 /* write colorspace information */
437 if (pict->color_primaries == AVCOL_PRI_BT709 &&
438 pict->color_trc == AVCOL_TRC_IEC61966_2_1) {
439 s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
440 png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
441 }
442
443 if (png_get_chrm(pict->color_primaries, s->buf))
444 png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
445 if (png_get_gama(pict->color_trc, s->buf))
446 png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);
447
448 side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
449 if ((ret = png_write_iccp(s, side_data)))
450 return ret;
451
452 /* put the palette if needed, must be after colorspace information */
453 if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
454 int has_alpha, alpha, i;
455 unsigned int v;
456 uint32_t *palette;
457 uint8_t *ptr, *alpha_ptr;
458
459 palette = (uint32_t *)pict->data[1];
460 ptr = s->buf;
461 alpha_ptr = s->buf + 256 * 3;
462 has_alpha = 0;
463 for (i = 0; i < 256; i++) {
464 v = palette[i];
465 alpha = v >> 24;
466 if (alpha != 0xff)
467 has_alpha = 1;
468 *alpha_ptr++ = alpha;
469 bytestream_put_be24(&ptr, v);
470 }
471 png_write_chunk(&s->bytestream,
472 MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
473 if (has_alpha) {
474 png_write_chunk(&s->bytestream,
475 MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
476 }
477 }
478
479 return 0;
480 }
481
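/* Filter and deflate every row of the frame (optionally as seven Adam7
 * passes) and emit the compressed data as IDAT/fdAT chunks. */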
482 static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
483 {
484 PNGEncContext *s = avctx->priv_data;
485 z_stream *const zstream = &s->zstream.zstream;
486 const AVFrame *const p = pict;
487 int y, len, ret;
488 int row_size, pass_row_size;
489 uint8_t *ptr, *top, *crow_buf, *crow;
490 uint8_t *crow_base = NULL;
491 uint8_t *progressive_buf = NULL;
492 uint8_t *top_buf = NULL;
493
494 row_size = (pict->width * s->bits_per_pixel + 7) >> 3;
495
496 crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
497 if (!crow_base) {
498 ret = AVERROR(ENOMEM);
499 goto the_end;
500 }
501 // pixel data should be aligned, but there's a control byte before it
502 crow_buf = crow_base + 15;
503 if (s->is_progressive) {
504 progressive_buf = av_malloc(row_size + 1);
505 top_buf = av_malloc(row_size + 1);
506 if (!progressive_buf || !top_buf) {
507 ret = AVERROR(ENOMEM);
508 goto the_end;
509 }
510 }
511
512 /* put each row */
513 zstream->avail_out = IOBUF_SIZE;
514 zstream->next_out = s->buf;
515 if (s->is_progressive) {
516 int pass;
517
518 for (pass = 0; pass < NB_PASSES; pass++) {
519 /* NOTE: a pass is completely omitted if no pixels would be
520 * output */
521 pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
522 if (pass_row_size > 0) {
523 top = NULL;
524 for (y = 0; y < pict->height; y++)
525 if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
526 ptr = p->data[0] + y * p->linesize[0];
527 FFSWAP(uint8_t *, progressive_buf, top_buf);
528 png_get_interlaced_row(progressive_buf, pass_row_size,
529 s->bits_per_pixel, pass,
530 ptr, pict->width);
531 crow = png_choose_filter(s, crow_buf, progressive_buf,
532 top, pass_row_size, s->bits_per_pixel >> 3);
533 png_write_row(avctx, crow, pass_row_size + 1);
534 top = progressive_buf;
535 }
536 }
537 }
538 } else {
539 top = NULL;
540 for (y = 0; y < pict->height; y++) {
541 ptr = p->data[0] + y * p->linesize[0];
542 crow = png_choose_filter(s, crow_buf, ptr, top,
543 row_size, s->bits_per_pixel >> 3);
544 png_write_row(avctx, crow, row_size + 1);
545 top = ptr;
546 }
547 }
548 /* compress last bytes */
549 for (;;) {
550 ret = deflate(zstream, Z_FINISH);
551 if (ret == Z_OK || ret == Z_STREAM_END) {
552 len = IOBUF_SIZE - zstream->avail_out;
553 if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
554 png_write_image_data(avctx, s->buf, len);
555 }
556 zstream->avail_out = IOBUF_SIZE;
557 zstream->next_out = s->buf;
558 if (ret == Z_STREAM_END)
559 break;
560 } else {
561 ret = -1;
562 goto the_end;
563 }
564 }
565
566 ret = 0;
567
568 the_end:
569 av_freep(&crow_base);
570 av_freep(&progressive_buf);
571 av_freep(&top_buf);
572 deflateReset(zstream);
573 return ret;
574 }
575
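/* Grow the worst-case packet size by the deflateBound() of the ICC profile
 * side data plus headroom for the chunk header and profile name. */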
576 static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict,
577 uint64_t *max_packet_size)
578 {
579 PNGEncContext *s = avctx->priv_data;
580 const AVFrameSideData *sd;
581 const int hdr_size = 128;
582 uint64_t new_pkt_size;
583 uLong bound;
584
585 if (!pict)
586 return 0;
587 sd = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
588 if (!sd || !sd->size)
589 return 0;
590 if (sd->size != (uLong) sd->size)
591 return AVERROR_INVALIDDATA;
592
593 bound = deflateBound(&s->zstream.zstream, sd->size);
594 if (bound > INT32_MAX - hdr_size)
595 return AVERROR_INVALIDDATA;
596
597 new_pkt_size = *max_packet_size + bound + hdr_size;
598 if (new_pkt_size < *max_packet_size)
599 return AVERROR_INVALIDDATA;
600 *max_packet_size = new_pkt_size;
601 return 0;
602 }
603
604 static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
605 const AVFrame *pict, int *got_packet)
606 {
607 PNGEncContext *s = avctx->priv_data;
608 int ret;
609 int enc_row_size;
610 uint64_t max_packet_size;
611
612 enc_row_size = deflateBound(&s->zstream.zstream,
613 (avctx->width * s->bits_per_pixel + 7) >> 3);
614 max_packet_size =
615 AV_INPUT_BUFFER_MIN_SIZE + // headers
616 avctx->height * (
617 enc_row_size +
618 12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
619 );
620 if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
621 return ret;
622 ret = ff_alloc_packet(avctx, pkt, max_packet_size);
623 if (ret < 0)
624 return ret;
625
626 s->bytestream_start =
627 s->bytestream = pkt->data;
628 s->bytestream_end = pkt->data + pkt->size;
629
630 AV_WB64(s->bytestream, PNGSIG);
631 s->bytestream += 8;
632
633 ret = encode_headers(avctx, pict);
634 if (ret < 0)
635 return ret;
636
637 ret = encode_frame(avctx, pict);
638 if (ret < 0)
639 return ret;
640
641 png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);
642
643 pkt->size = s->bytestream - s->bytestream_start;
644 pkt->flags |= AV_PKT_FLAG_KEY;
645 *got_packet = 1;
646
647 return 0;
648 }
649
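/* Compute the smallest rectangle that differs between the new frame and the
 * current output background, and store the data that, when blended onto the
 * background with the chosen blend_op, reproduces the new frame. */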
650 static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input,
651 APNGFctlChunk *fctl_chunk, uint8_t bpp)
652 {
653 // output: background, input: foreground
654 // output the image such that, when blended with the background, it will produce the foreground
655
656 unsigned int x, y;
657 unsigned int leftmost_x = input->width;
658 unsigned int rightmost_x = 0;
659 unsigned int topmost_y = input->height;
660 unsigned int bottommost_y = 0;
661 const uint8_t *input_data = input->data[0];
662 uint8_t *output_data = output->data[0];
663 ptrdiff_t input_linesize = input->linesize[0];
664 ptrdiff_t output_linesize = output->linesize[0];
665
666 // Find bounding box of changes
667 for (y = 0; y < input->height; ++y) {
668 for (x = 0; x < input->width; ++x) {
669 if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
670 continue;
671
672 if (x < leftmost_x)
673 leftmost_x = x;
674 if (x >= rightmost_x)
675 rightmost_x = x + 1;
676 if (y < topmost_y)
677 topmost_y = y;
678 if (y >= bottommost_y)
679 bottommost_y = y + 1;
680 }
681
682 input_data += input_linesize;
683 output_data += output_linesize;
684 }
685
686 if (leftmost_x == input->width && rightmost_x == 0) {
687 // Empty frame
688 // APNG does not support empty frames, so we make it a 1x1 frame
689 leftmost_x = topmost_y = 0;
690 rightmost_x = bottommost_y = 1;
691 }
692
693 // Do actual inverse blending
694 if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
695 output_data = output->data[0];
696 for (y = topmost_y; y < bottommost_y; ++y) {
697 memcpy(output_data,
698 input->data[0] + input_linesize * y + bpp * leftmost_x,
699 bpp * (rightmost_x - leftmost_x));
700 output_data += output_linesize;
701 }
702 } else { // APNG_BLEND_OP_OVER
703 size_t transparent_palette_index;
704 uint32_t *palette;
705
706 switch (input->format) {
707 case AV_PIX_FMT_RGBA64BE:
708 case AV_PIX_FMT_YA16BE:
709 case AV_PIX_FMT_RGBA:
710 case AV_PIX_FMT_GRAY8A:
711 break;
712
713 case AV_PIX_FMT_PAL8:
714 palette = (uint32_t*)input->data[1];
715 for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
716 if (palette[transparent_palette_index] >> 24 == 0)
717 break;
718 break;
719
720 default:
721 // No alpha, so blending not possible
722 return -1;
723 }
724
725 for (y = topmost_y; y < bottommost_y; ++y) {
726 uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
727 uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
728 output_data = output->data[0] + output_linesize * (y - topmost_y);
729 for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
730 if (!memcmp(foreground, background, bpp)) {
731 if (input->format == AV_PIX_FMT_PAL8) {
732 if (transparent_palette_index == 256) {
733 // Need fully transparent colour, but none exists
734 return -1;
735 }
736
737 *output_data = transparent_palette_index;
738 } else {
739 memset(output_data, 0, bpp);
740 }
741 continue;
742 }
743
744 // Check for special alpha values, since full inverse
745 // alpha-on-alpha blending is rarely possible, and when
746 // possible, doesn't compress much better than
747 // APNG_BLEND_OP_SOURCE blending
748 switch (input->format) {
749 case AV_PIX_FMT_RGBA64BE:
750 if (((uint16_t*)foreground)[3] == 0xffff ||
751 ((uint16_t*)background)[3] == 0)
752 break;
753 return -1;
754
755 case AV_PIX_FMT_YA16BE:
756 if (((uint16_t*)foreground)[1] == 0xffff ||
757 ((uint16_t*)background)[1] == 0)
758 break;
759 return -1;
760
761 case AV_PIX_FMT_RGBA:
762 if (foreground[3] == 0xff || background[3] == 0)
763 break;
764 return -1;
765
766 case AV_PIX_FMT_GRAY8A:
767 if (foreground[1] == 0xff || background[1] == 0)
768 break;
769 return -1;
770
771 case AV_PIX_FMT_PAL8:
772 if (palette[*foreground] >> 24 == 0xff ||
773 palette[*background] >> 24 == 0)
774 break;
775 return -1;
776 }
777
778 memmove(output_data, foreground, bpp);
779 }
780 }
781 }
782
783 output->width = rightmost_x - leftmost_x;
784 output->height = bottommost_y - topmost_y;
785 fctl_chunk->width = output->width;
786 fctl_chunk->height = output->height;
787 fctl_chunk->x_offset = leftmost_x;
788 fctl_chunk->y_offset = topmost_y;
789
790 return 0;
791 }
792
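/* Encode one APNG frame: try every dispose_op/blend_op combination on a diff
 * against the previous output and keep the smallest resulting bitstream. */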
793 static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
794 APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
795 {
796 PNGEncContext *s = avctx->priv_data;
797 int ret;
798 unsigned int y;
799 AVFrame* diffFrame;
800 uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
801 uint8_t *original_bytestream, *original_bytestream_end;
802 uint8_t *temp_bytestream = 0, *temp_bytestream_end;
803 uint32_t best_sequence_number;
804 uint8_t *best_bytestream;
805 size_t best_bytestream_size = SIZE_MAX;
806 APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
807 APNGFctlChunk fctl_chunk = *best_fctl_chunk;
808
809 if (avctx->frame_number == 0) {
810 best_fctl_chunk->width = pict->width;
811 best_fctl_chunk->height = pict->height;
812 best_fctl_chunk->x_offset = 0;
813 best_fctl_chunk->y_offset = 0;
814 best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
815 return encode_frame(avctx, pict);
816 }
817
818 diffFrame = av_frame_alloc();
819 if (!diffFrame)
820 return AVERROR(ENOMEM);
821
822 diffFrame->format = pict->format;
823 diffFrame->width = pict->width;
824 diffFrame->height = pict->height;
825 if ((ret = av_frame_get_buffer(diffFrame, 0)) < 0)
826 goto fail;
827
828 original_bytestream = s->bytestream;
829 original_bytestream_end = s->bytestream_end;
830
831 temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
832 if (!temp_bytestream) {
833 ret = AVERROR(ENOMEM);
834 goto fail;
835 }
836 temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
837
838 for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
839 // 0: APNG_DISPOSE_OP_NONE
840 // 1: APNG_DISPOSE_OP_BACKGROUND
841 // 2: APNG_DISPOSE_OP_PREVIOUS
842
843 for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
844 // 0: APNG_BLEND_OP_SOURCE
845 // 1: APNG_BLEND_OP_OVER
846
847 uint32_t original_sequence_number = s->sequence_number, sequence_number;
848 uint8_t *bytestream_start = s->bytestream;
849 size_t bytestream_size;
850
851 // Do disposal
852 if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
853 diffFrame->width = pict->width;
854 diffFrame->height = pict->height;
855 ret = av_frame_copy(diffFrame, s->last_frame);
856 if (ret < 0)
857 goto fail;
858
859 if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
860 for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
861 size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
862 memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
863 }
864 }
865 } else {
866 if (!s->prev_frame)
867 continue;
868
869 diffFrame->width = pict->width;
870 diffFrame->height = pict->height;
871 ret = av_frame_copy(diffFrame, s->prev_frame);
872 if (ret < 0)
873 goto fail;
874 }
875
876 // Do inverse blending
877 if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
878 continue;
879
880 // Do encoding
881 ret = encode_frame(avctx, diffFrame);
882 sequence_number = s->sequence_number;
883 s->sequence_number = original_sequence_number;
884 bytestream_size = s->bytestream - bytestream_start;
885 s->bytestream = bytestream_start;
886 if (ret < 0)
887 goto fail;
888
889 if (bytestream_size < best_bytestream_size) {
890 *best_fctl_chunk = fctl_chunk;
891 *best_last_fctl_chunk = last_fctl_chunk;
892
893 best_sequence_number = sequence_number;
894 best_bytestream = s->bytestream;
895 best_bytestream_size = bytestream_size;
896
897 if (best_bytestream == original_bytestream) {
898 s->bytestream = temp_bytestream;
899 s->bytestream_end = temp_bytestream_end;
900 } else {
901 s->bytestream = original_bytestream;
902 s->bytestream_end = original_bytestream_end;
903 }
904 }
905 }
906 }
907
908 s->sequence_number = best_sequence_number;
909 s->bytestream = original_bytestream + best_bytestream_size;
910 s->bytestream_end = original_bytestream_end;
911 if (best_bytestream != original_bytestream)
912 memcpy(original_bytestream, best_bytestream, best_bytestream_size);
913
914 ret = 0;
915
916 fail:
917 av_freep(&temp_bytestream);
918 av_frame_free(&diffFrame);
919 return ret;
920 }
921
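/* APNG packets are emitted with one frame of delay: the buffered packet for
 * the previous frame is completed and returned once the next frame (or the
 * flush) arrives, since the previous frame's dispose_op is only decided while
 * encoding its successor. */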
922 static int encode_apng(AVCodecContext *avctx, AVPacket *pkt,
923 const AVFrame *pict, int *got_packet)
924 {
925 PNGEncContext *s = avctx->priv_data;
926 int ret;
927 int enc_row_size;
928 uint64_t max_packet_size;
929 APNGFctlChunk fctl_chunk = {0};
930
931 if (pict && s->color_type == PNG_COLOR_TYPE_PALETTE) {
932 uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));
933
934 if (avctx->frame_number == 0) {
935 s->palette_checksum = checksum;
936 } else if (checksum != s->palette_checksum) {
937 av_log(avctx, AV_LOG_ERROR,
938 "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
939 return -1;
940 }
941 }
942
943 enc_row_size = deflateBound(&s->zstream.zstream,
944 (avctx->width * s->bits_per_pixel + 7) >> 3);
945 max_packet_size =
946 AV_INPUT_BUFFER_MIN_SIZE + // headers
947 avctx->height * (
948 enc_row_size +
949 (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
950 );
951 if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
952 return ret;
953 if (max_packet_size > INT_MAX)
954 return AVERROR(ENOMEM);
955
956 if (avctx->frame_number == 0) {
957 if (!pict)
958 return AVERROR(EINVAL);
959
960 s->bytestream = s->extra_data = av_malloc(AV_INPUT_BUFFER_MIN_SIZE);
961 if (!s->extra_data)
962 return AVERROR(ENOMEM);
963
964 ret = encode_headers(avctx, pict);
965 if (ret < 0)
966 return ret;
967
968 s->extra_data_size = s->bytestream - s->extra_data;
969
970 s->last_frame_packet = av_malloc(max_packet_size);
971 if (!s->last_frame_packet)
972 return AVERROR(ENOMEM);
973 } else if (s->last_frame) {
974 ret = ff_get_encode_buffer(avctx, pkt, s->last_frame_packet_size, 0);
975 if (ret < 0)
976 return ret;
977
978 memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
979 pkt->pts = pkt->dts = s->last_frame->pts;
980 }
981
982 if (pict) {
983 s->bytestream_start =
984 s->bytestream = s->last_frame_packet;
985 s->bytestream_end = s->bytestream + max_packet_size;
986
987 // We're encoding the frame first, so we have to do a bit of shuffling around
988 // to have the image data write to the correct place in the buffer
989 fctl_chunk.sequence_number = s->sequence_number;
990 ++s->sequence_number;
991 s->bytestream += APNG_FCTL_CHUNK_SIZE + 12;
992
993 ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
994 if (ret < 0)
995 return ret;
996
997 fctl_chunk.delay_num = 0; // delay filled in during muxing
998 fctl_chunk.delay_den = 0;
999 } else {
1000 s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
1001 }
1002
1003 if (s->last_frame) {
1004 uint8_t* last_fctl_chunk_start = pkt->data;
1005 uint8_t buf[APNG_FCTL_CHUNK_SIZE];
1006 if (!s->extra_data_updated) {
1007 uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
1008 if (!side_data)
1009 return AVERROR(ENOMEM);
1010 memcpy(side_data, s->extra_data, s->extra_data_size);
1011 s->extra_data_updated = 1;
1012 }
1013
1014 AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
1015 AV_WB32(buf + 4, s->last_frame_fctl.width);
1016 AV_WB32(buf + 8, s->last_frame_fctl.height);
1017 AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
1018 AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
1019 AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
1020 AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
1021 buf[24] = s->last_frame_fctl.dispose_op;
1022 buf[25] = s->last_frame_fctl.blend_op;
1023 png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, sizeof(buf));
1024
1025 *got_packet = 1;
1026 }
1027
1028 if (pict) {
1029 if (!s->last_frame) {
1030 s->last_frame = av_frame_alloc();
1031 if (!s->last_frame)
1032 return AVERROR(ENOMEM);
1033 } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
1034 if (!s->prev_frame) {
1035 s->prev_frame = av_frame_alloc();
1036 if (!s->prev_frame)
1037 return AVERROR(ENOMEM);
1038
1039 s->prev_frame->format = pict->format;
1040 s->prev_frame->width = pict->width;
1041 s->prev_frame->height = pict->height;
1042 if ((ret = av_frame_get_buffer(s->prev_frame, 0)) < 0)
1043 return ret;
1044 }
1045
1046 // Do disposal, but not blending
1047 av_frame_copy(s->prev_frame, s->last_frame);
1048 if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
1049 uint32_t y;
1050 uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
1051 for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
1052 size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
1053 memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
1054 }
1055 }
1056 }
1057
1058 av_frame_unref(s->last_frame);
1059 ret = av_frame_ref(s->last_frame, (AVFrame*)pict);
1060 if (ret < 0)
1061 return ret;
1062
1063 s->last_frame_fctl = fctl_chunk;
1064 s->last_frame_packet_size = s->bytestream - s->bytestream_start;
1065 } else {
1066 av_frame_free(&s->last_frame);
1067 }
1068
1069 return 0;
1070 }
1071
1072 static av_cold int png_enc_init(AVCodecContext *avctx)
1073 {
1074 PNGEncContext *s = avctx->priv_data;
1075 int compression_level;
1076
1077 switch (avctx->pix_fmt) {
1078 case AV_PIX_FMT_RGBA:
1079 avctx->bits_per_coded_sample = 32;
1080 break;
1081 case AV_PIX_FMT_RGB24:
1082 avctx->bits_per_coded_sample = 24;
1083 break;
1084 case AV_PIX_FMT_GRAY8:
1085 avctx->bits_per_coded_sample = 0x28;
1086 break;
1087 case AV_PIX_FMT_MONOBLACK:
1088 avctx->bits_per_coded_sample = 1;
1089 break;
1090 case AV_PIX_FMT_PAL8:
1091 avctx->bits_per_coded_sample = 8;
1092 }
1093
1094 ff_llvidencdsp_init(&s->llvidencdsp);
1095
1096 if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
1097 s->filter_type = PNG_FILTER_VALUE_NONE;
1098
1099 if (s->dpi && s->dpm) {
1100 av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
1101 return AVERROR(EINVAL);
1102 } else if (s->dpi) {
1103 s->dpm = s->dpi * 10000 / 254;
1104 }
1105
1106 s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
1107 switch (avctx->pix_fmt) {
1108 case AV_PIX_FMT_RGBA64BE:
1109 s->bit_depth = 16;
1110 s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1111 break;
1112 case AV_PIX_FMT_RGB48BE:
1113 s->bit_depth = 16;
1114 s->color_type = PNG_COLOR_TYPE_RGB;
1115 break;
1116 case AV_PIX_FMT_RGBA:
1117 s->bit_depth = 8;
1118 s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1119 break;
1120 case AV_PIX_FMT_RGB24:
1121 s->bit_depth = 8;
1122 s->color_type = PNG_COLOR_TYPE_RGB;
1123 break;
1124 case AV_PIX_FMT_GRAY16BE:
1125 s->bit_depth = 16;
1126 s->color_type = PNG_COLOR_TYPE_GRAY;
1127 break;
1128 case AV_PIX_FMT_GRAY8:
1129 s->bit_depth = 8;
1130 s->color_type = PNG_COLOR_TYPE_GRAY;
1131 break;
1132 case AV_PIX_FMT_GRAY8A:
1133 s->bit_depth = 8;
1134 s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1135 break;
1136 case AV_PIX_FMT_YA16BE:
1137 s->bit_depth = 16;
1138 s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1139 break;
1140 case AV_PIX_FMT_MONOBLACK:
1141 s->bit_depth = 1;
1142 s->color_type = PNG_COLOR_TYPE_GRAY;
1143 break;
1144 case AV_PIX_FMT_PAL8:
1145 s->bit_depth = 8;
1146 s->color_type = PNG_COLOR_TYPE_PALETTE;
1147 break;
1148 default:
1149 return -1;
1150 }
1151 s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;
1152
1153 compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
1154 ? Z_DEFAULT_COMPRESSION
1155 : av_clip(avctx->compression_level, 0, 9);
1156 return ff_deflate_init(&s->zstream, compression_level, avctx);
1157 }
1158
1159 static av_cold int png_enc_close(AVCodecContext *avctx)
1160 {
1161 PNGEncContext *s = avctx->priv_data;
1162
1163 ff_deflate_end(&s->zstream);
1164 av_frame_free(&s->last_frame);
1165 av_frame_free(&s->prev_frame);
1166 av_freep(&s->last_frame_packet);
1167 av_freep(&s->extra_data);
1168 s->extra_data_size = 0;
1169 return 0;
1170 }
1171
1172 #define OFFSET(x) offsetof(PNGEncContext, x)
1173 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
1174 static const AVOption options[] = {
1175 {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1176 {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1177 { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, "pred" },
1178 { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE }, INT_MIN, INT_MAX, VE, "pred" },
1179 { "sub", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB }, INT_MIN, INT_MAX, VE, "pred" },
1180 { "up", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP }, INT_MIN, INT_MAX, VE, "pred" },
1181 { "avg", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG }, INT_MIN, INT_MAX, VE, "pred" },
1182 { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, "pred" },
1183 { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, "pred" },
1184 { NULL},
1185 };
1186
1187 static const AVClass pngenc_class = {
1188 .class_name = "(A)PNG encoder",
1189 .item_name = av_default_item_name,
1190 .option = options,
1191 .version = LIBAVUTIL_VERSION_INT,
1192 };
1193
1194 const FFCodec ff_png_encoder = {
1195 .p.name = "png",
1196 .p.long_name = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
1197 .p.type = AVMEDIA_TYPE_VIDEO,
1198 .p.id = AV_CODEC_ID_PNG,
1199 .priv_data_size = sizeof(PNGEncContext),
1200 .init = png_enc_init,
1201 .close = png_enc_close,
1202 FF_CODEC_ENCODE_CB(encode_png),
1203 .p.capabilities = AV_CODEC_CAP_FRAME_THREADS,
1204 .p.pix_fmts = (const enum AVPixelFormat[]) {
1205 AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1206 AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1207 AV_PIX_FMT_PAL8,
1208 AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1209 AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1210 AV_PIX_FMT_MONOBLACK, AV_PIX_FMT_NONE
1211 },
1212 .p.priv_class = &pngenc_class,
1213 .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1214 };
1215
1216 const FFCodec ff_apng_encoder = {
1217 .p.name = "apng",
1218 .p.long_name = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
1219 .p.type = AVMEDIA_TYPE_VIDEO,
1220 .p.id = AV_CODEC_ID_APNG,
1221 .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1222 .priv_data_size = sizeof(PNGEncContext),
1223 .init = png_enc_init,
1224 .close = png_enc_close,
1225 FF_CODEC_ENCODE_CB(encode_apng),
1226 .p.pix_fmts = (const enum AVPixelFormat[]) {
1227 AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1228 AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1229 AV_PIX_FMT_PAL8,
1230 AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1231 AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1232 AV_PIX_FMT_NONE
1233 },
1234 .p.priv_class = &pngenc_class,
1235 .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1236 };
1237