/* FFmpeg — pngenc.c (PNG/APNG image encoder) */
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
#include <zlib.h>

#include "libavutil/avassert.h"
#include "libavutil/color_utils.h"
#include "libavutil/crc.h"
#include "libavutil/libm.h"
#include "libavutil/opt.h"
#include "libavutil/stereo3d.h"

#include "avcodec.h"
#include "apng.h"
#include "bytestream.h"
#include "encode.h"
#include "internal.h"
#include "lossless_videoencdsp.h"
#include "png.h"
39 #define IOBUF_SIZE 4096
40 
41 typedef struct APNGFctlChunk {
42  uint32_t sequence_number;
43  uint32_t width, height;
44  uint32_t x_offset, y_offset;
45  uint16_t delay_num, delay_den;
46  uint8_t dispose_op, blend_op;
48 
49 typedef struct PNGEncContext {
50  AVClass *class;
52 
53  uint8_t *bytestream;
54  uint8_t *bytestream_start;
55  uint8_t *bytestream_end;
56 
58 
59  z_stream zstream;
60  uint8_t buf[IOBUF_SIZE];
61  int dpi; ///< Physical pixel density, in dots per inch, if set
62  int dpm; ///< Physical pixel density, in dots per meter, if set
63 
65  int bit_depth;
68 
69  // APNG
70  uint32_t palette_checksum; // Used to ensure a single unique palette
71  uint32_t sequence_number;
73  uint8_t *extra_data;
75 
82 
83 static void png_get_interlaced_row(uint8_t *dst, int row_size,
84  int bits_per_pixel, int pass,
85  const uint8_t *src, int width)
86 {
87  int x, mask, dst_x, j, b, bpp;
88  uint8_t *d;
89  const uint8_t *s;
90  static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};
91 
92  mask = masks[pass];
93  switch (bits_per_pixel) {
94  case 1:
95  memset(dst, 0, row_size);
96  dst_x = 0;
97  for (x = 0; x < width; x++) {
98  j = (x & 7);
99  if ((mask << j) & 0x80) {
100  b = (src[x >> 3] >> (7 - j)) & 1;
101  dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
102  dst_x++;
103  }
104  }
105  break;
106  default:
107  bpp = bits_per_pixel >> 3;
108  d = dst;
109  s = src;
110  for (x = 0; x < width; x++) {
111  j = x & 7;
112  if ((mask << j) & 0x80) {
113  memcpy(d, s, bpp);
114  d += bpp;
115  }
116  s += bpp;
117  }
118  break;
119  }
120 }
121 
122 static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
123  int w, int bpp)
124 {
125  int i;
126  for (i = 0; i < w; i++) {
127  int a, b, c, p, pa, pb, pc;
128 
129  a = src[i - bpp];
130  b = top[i];
131  c = top[i - bpp];
132 
133  p = b - c;
134  pc = a - c;
135 
136  pa = abs(p);
137  pb = abs(pc);
138  pc = abs(p + pc);
139 
140  if (pa <= pb && pa <= pc)
141  p = a;
142  else if (pb <= pc)
143  p = b;
144  else
145  p = c;
146  dst[i] = src[i] - p;
147  }
148 }
149 
150 static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
151 {
152  const uint8_t *src1 = src + bpp;
153  const uint8_t *src2 = src;
154  int x, unaligned_w;
155 
156  memcpy(dst, src, bpp);
157  dst += bpp;
158  size -= bpp;
159  unaligned_w = FFMIN(32 - bpp, size);
160  for (x = 0; x < unaligned_w; x++)
161  *dst++ = *src1++ - *src2++;
162  size -= unaligned_w;
163  c->llvidencdsp.diff_bytes(dst, src1, src2, size);
164 }
165 
166 static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
167  uint8_t *src, uint8_t *top, int size, int bpp)
168 {
169  int i;
170 
171  switch (filter_type) {
173  memcpy(dst, src, size);
174  break;
176  sub_left_prediction(c, dst, src, bpp, size);
177  break;
178  case PNG_FILTER_VALUE_UP:
179  c->llvidencdsp.diff_bytes(dst, src, top, size);
180  break;
182  for (i = 0; i < bpp; i++)
183  dst[i] = src[i] - (top[i] >> 1);
184  for (; i < size; i++)
185  dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
186  break;
188  for (i = 0; i < bpp; i++)
189  dst[i] = src[i] - top[i];
190  sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
191  break;
192  }
193 }
194 
195 static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
196  uint8_t *src, uint8_t *top, int size, int bpp)
197 {
198  int pred = s->filter_type;
199  av_assert0(bpp || !pred);
200  if (!top && pred)
202  if (pred == PNG_FILTER_VALUE_MIXED) {
203  int i;
204  int cost, bcost = INT_MAX;
205  uint8_t *buf1 = dst, *buf2 = dst + size + 16;
206  for (pred = 0; pred < 5; pred++) {
207  png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
208  buf1[0] = pred;
209  cost = 0;
210  for (i = 0; i <= size; i++)
211  cost += abs((int8_t) buf1[i]);
212  if (cost < bcost) {
213  bcost = cost;
214  FFSWAP(uint8_t *, buf1, buf2);
215  }
216  }
217  return buf2;
218  } else {
219  png_filter_row(s, dst + 1, pred, src, top, size, bpp);
220  dst[0] = pred;
221  return dst;
222  }
223 }
224 
225 static void png_write_chunk(uint8_t **f, uint32_t tag,
226  const uint8_t *buf, int length)
227 {
228  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
229  uint32_t crc = ~0U;
230  uint8_t tagbuf[4];
231 
232  bytestream_put_be32(f, length);
233  AV_WL32(tagbuf, tag);
234  crc = av_crc(crc_table, crc, tagbuf, 4);
235  bytestream_put_be32(f, av_bswap32(tag));
236  if (length > 0) {
237  crc = av_crc(crc_table, crc, buf, length);
238  memcpy(*f, buf, length);
239  *f += length;
240  }
241  bytestream_put_be32(f, ~crc);
242 }
243 
245  const uint8_t *buf, int length)
246 {
247  PNGEncContext *s = avctx->priv_data;
248  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
249  uint32_t crc = ~0U;
250 
251  if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_number == 0) {
252  png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
253  return;
254  }
255 
256  bytestream_put_be32(&s->bytestream, length + 4);
257 
258  bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
259  bytestream_put_be32(&s->bytestream, s->sequence_number);
260  crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
261 
262  crc = av_crc(crc_table, crc, buf, length);
263  memcpy(s->bytestream, buf, length);
264  s->bytestream += length;
265 
266  bytestream_put_be32(&s->bytestream, ~crc);
267 
268  ++s->sequence_number;
269 }
270 
271 /* XXX: do filtering */
272 static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
273 {
274  PNGEncContext *s = avctx->priv_data;
275  int ret;
276 
277  s->zstream.avail_in = size;
278  s->zstream.next_in = data;
279  while (s->zstream.avail_in > 0) {
280  ret = deflate(&s->zstream, Z_NO_FLUSH);
281  if (ret != Z_OK)
282  return -1;
283  if (s->zstream.avail_out == 0) {
284  if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
285  png_write_image_data(avctx, s->buf, IOBUF_SIZE);
286  s->zstream.avail_out = IOBUF_SIZE;
287  s->zstream.next_out = s->buf;
288  }
289  }
290  return 0;
291 }
292 
293 #define AV_WB32_PNG(buf, n) AV_WB32(buf, lrint((n) * 100000))
294 static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
295 {
296  double rx, ry, gx, gy, bx, by, wx = 0.3127, wy = 0.3290;
297  switch (prim) {
298  case AVCOL_PRI_BT709:
299  rx = 0.640; ry = 0.330;
300  gx = 0.300; gy = 0.600;
301  bx = 0.150; by = 0.060;
302  break;
303  case AVCOL_PRI_BT470M:
304  rx = 0.670; ry = 0.330;
305  gx = 0.210; gy = 0.710;
306  bx = 0.140; by = 0.080;
307  wx = 0.310; wy = 0.316;
308  break;
309  case AVCOL_PRI_BT470BG:
310  rx = 0.640; ry = 0.330;
311  gx = 0.290; gy = 0.600;
312  bx = 0.150; by = 0.060;
313  break;
314  case AVCOL_PRI_SMPTE170M:
315  case AVCOL_PRI_SMPTE240M:
316  rx = 0.630; ry = 0.340;
317  gx = 0.310; gy = 0.595;
318  bx = 0.155; by = 0.070;
319  break;
320  case AVCOL_PRI_BT2020:
321  rx = 0.708; ry = 0.292;
322  gx = 0.170; gy = 0.797;
323  bx = 0.131; by = 0.046;
324  break;
325  default:
326  return 0;
327  }
328 
329  AV_WB32_PNG(buf , wx); AV_WB32_PNG(buf + 4 , wy);
330  AV_WB32_PNG(buf + 8 , rx); AV_WB32_PNG(buf + 12, ry);
331  AV_WB32_PNG(buf + 16, gx); AV_WB32_PNG(buf + 20, gy);
332  AV_WB32_PNG(buf + 24, bx); AV_WB32_PNG(buf + 28, by);
333  return 1;
334 }
335 
336 static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
337 {
338  double gamma = avpriv_get_gamma_from_trc(trc);
339  if (gamma <= 1e-6)
340  return 0;
341 
342  AV_WB32_PNG(buf, 1.0 / gamma);
343  return 1;
344 }
345 
346 static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
347 {
348  AVFrameSideData *side_data;
349  PNGEncContext *s = avctx->priv_data;
350 
351  /* write png header */
352  AV_WB32(s->buf, avctx->width);
353  AV_WB32(s->buf + 4, avctx->height);
354  s->buf[8] = s->bit_depth;
355  s->buf[9] = s->color_type;
356  s->buf[10] = 0; /* compression type */
357  s->buf[11] = 0; /* filter type */
358  s->buf[12] = s->is_progressive; /* interlace type */
359  png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);
360 
361  /* write physical information */
362  if (s->dpm) {
363  AV_WB32(s->buf, s->dpm);
364  AV_WB32(s->buf + 4, s->dpm);
365  s->buf[8] = 1; /* unit specifier is meter */
366  } else {
367  AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
368  AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
369  s->buf[8] = 0; /* unit specifier is unknown */
370  }
371  png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);
372 
373  /* write stereoscopic information */
375  if (side_data) {
376  AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
377  switch (stereo3d->type) {
379  s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
380  png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
381  break;
382  case AV_STEREO3D_2D:
383  break;
384  default:
385  av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
386  break;
387  }
388  }
389 
390  /* write colorspace information */
391  if (pict->color_primaries == AVCOL_PRI_BT709 &&
393  s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
394  png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
395  }
396 
397  if (png_get_chrm(pict->color_primaries, s->buf))
398  png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
399  if (png_get_gama(pict->color_trc, s->buf))
400  png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);
401 
402  /* put the palette if needed */
403  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
404  int has_alpha, alpha, i;
405  unsigned int v;
406  uint32_t *palette;
407  uint8_t *ptr, *alpha_ptr;
408 
409  palette = (uint32_t *)pict->data[1];
410  ptr = s->buf;
411  alpha_ptr = s->buf + 256 * 3;
412  has_alpha = 0;
413  for (i = 0; i < 256; i++) {
414  v = palette[i];
415  alpha = v >> 24;
416  if (alpha != 0xff)
417  has_alpha = 1;
418  *alpha_ptr++ = alpha;
419  bytestream_put_be24(&ptr, v);
420  }
421  png_write_chunk(&s->bytestream,
422  MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
423  if (has_alpha) {
424  png_write_chunk(&s->bytestream,
425  MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
426  }
427  }
428 
429  return 0;
430 }
431 
432 static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
433 {
434  PNGEncContext *s = avctx->priv_data;
435  const AVFrame *const p = pict;
436  int y, len, ret;
437  int row_size, pass_row_size;
438  uint8_t *ptr, *top, *crow_buf, *crow;
439  uint8_t *crow_base = NULL;
440  uint8_t *progressive_buf = NULL;
441  uint8_t *top_buf = NULL;
442 
443  row_size = (pict->width * s->bits_per_pixel + 7) >> 3;
444 
445  crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
446  if (!crow_base) {
447  ret = AVERROR(ENOMEM);
448  goto the_end;
449  }
450  // pixel data should be aligned, but there's a control byte before it
451  crow_buf = crow_base + 15;
452  if (s->is_progressive) {
453  progressive_buf = av_malloc(row_size + 1);
454  top_buf = av_malloc(row_size + 1);
455  if (!progressive_buf || !top_buf) {
456  ret = AVERROR(ENOMEM);
457  goto the_end;
458  }
459  }
460 
461  /* put each row */
462  s->zstream.avail_out = IOBUF_SIZE;
463  s->zstream.next_out = s->buf;
464  if (s->is_progressive) {
465  int pass;
466 
467  for (pass = 0; pass < NB_PASSES; pass++) {
468  /* NOTE: a pass is completely omitted if no pixels would be
469  * output */
470  pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
471  if (pass_row_size > 0) {
472  top = NULL;
473  for (y = 0; y < pict->height; y++)
474  if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
475  ptr = p->data[0] + y * p->linesize[0];
476  FFSWAP(uint8_t *, progressive_buf, top_buf);
477  png_get_interlaced_row(progressive_buf, pass_row_size,
478  s->bits_per_pixel, pass,
479  ptr, pict->width);
480  crow = png_choose_filter(s, crow_buf, progressive_buf,
481  top, pass_row_size, s->bits_per_pixel >> 3);
482  png_write_row(avctx, crow, pass_row_size + 1);
483  top = progressive_buf;
484  }
485  }
486  }
487  } else {
488  top = NULL;
489  for (y = 0; y < pict->height; y++) {
490  ptr = p->data[0] + y * p->linesize[0];
491  crow = png_choose_filter(s, crow_buf, ptr, top,
492  row_size, s->bits_per_pixel >> 3);
493  png_write_row(avctx, crow, row_size + 1);
494  top = ptr;
495  }
496  }
497  /* compress last bytes */
498  for (;;) {
499  ret = deflate(&s->zstream, Z_FINISH);
500  if (ret == Z_OK || ret == Z_STREAM_END) {
501  len = IOBUF_SIZE - s->zstream.avail_out;
502  if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
503  png_write_image_data(avctx, s->buf, len);
504  }
505  s->zstream.avail_out = IOBUF_SIZE;
506  s->zstream.next_out = s->buf;
507  if (ret == Z_STREAM_END)
508  break;
509  } else {
510  ret = -1;
511  goto the_end;
512  }
513  }
514 
515  ret = 0;
516 
517 the_end:
518  av_freep(&crow_base);
519  av_freep(&progressive_buf);
520  av_freep(&top_buf);
521  deflateReset(&s->zstream);
522  return ret;
523 }
524 
525 static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
526  const AVFrame *pict, int *got_packet)
527 {
528  PNGEncContext *s = avctx->priv_data;
529  int ret;
530  int enc_row_size;
531  size_t max_packet_size;
532 
533  enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
534  max_packet_size =
535  AV_INPUT_BUFFER_MIN_SIZE + // headers
536  avctx->height * (
537  enc_row_size +
538  12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
539  );
540  if (max_packet_size > INT_MAX)
541  return AVERROR(ENOMEM);
542  ret = ff_alloc_packet(avctx, pkt, max_packet_size);
543  if (ret < 0)
544  return ret;
545 
546  s->bytestream_start =
547  s->bytestream = pkt->data;
548  s->bytestream_end = pkt->data + pkt->size;
549 
550  AV_WB64(s->bytestream, PNGSIG);
551  s->bytestream += 8;
552 
553  ret = encode_headers(avctx, pict);
554  if (ret < 0)
555  return ret;
556 
557  ret = encode_frame(avctx, pict);
558  if (ret < 0)
559  return ret;
560 
561  png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);
562 
563  pkt->size = s->bytestream - s->bytestream_start;
565  *got_packet = 1;
566 
567  return 0;
568 }
569 
571  APNGFctlChunk *fctl_chunk, uint8_t bpp)
572 {
573  // output: background, input: foreground
574  // output the image such that when blended with the background, will produce the foreground
575 
576  unsigned int x, y;
577  unsigned int leftmost_x = input->width;
578  unsigned int rightmost_x = 0;
579  unsigned int topmost_y = input->height;
580  unsigned int bottommost_y = 0;
581  const uint8_t *input_data = input->data[0];
582  uint8_t *output_data = output->data[0];
583  ptrdiff_t input_linesize = input->linesize[0];
584  ptrdiff_t output_linesize = output->linesize[0];
585 
586  // Find bounding box of changes
587  for (y = 0; y < input->height; ++y) {
588  for (x = 0; x < input->width; ++x) {
589  if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
590  continue;
591 
592  if (x < leftmost_x)
593  leftmost_x = x;
594  if (x >= rightmost_x)
595  rightmost_x = x + 1;
596  if (y < topmost_y)
597  topmost_y = y;
598  if (y >= bottommost_y)
599  bottommost_y = y + 1;
600  }
601 
602  input_data += input_linesize;
603  output_data += output_linesize;
604  }
605 
606  if (leftmost_x == input->width && rightmost_x == 0) {
607  // Empty frame
608  // APNG does not support empty frames, so we make it a 1x1 frame
609  leftmost_x = topmost_y = 0;
610  rightmost_x = bottommost_y = 1;
611  }
612 
613  // Do actual inverse blending
614  if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
615  output_data = output->data[0];
616  for (y = topmost_y; y < bottommost_y; ++y) {
617  memcpy(output_data,
618  input->data[0] + input_linesize * y + bpp * leftmost_x,
619  bpp * (rightmost_x - leftmost_x));
620  output_data += output_linesize;
621  }
622  } else { // APNG_BLEND_OP_OVER
623  size_t transparent_palette_index;
624  uint32_t *palette;
625 
626  switch (input->format) {
627  case AV_PIX_FMT_RGBA64BE:
628  case AV_PIX_FMT_YA16BE:
629  case AV_PIX_FMT_RGBA:
630  case AV_PIX_FMT_GRAY8A:
631  break;
632 
633  case AV_PIX_FMT_PAL8:
634  palette = (uint32_t*)input->data[1];
635  for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
636  if (palette[transparent_palette_index] >> 24 == 0)
637  break;
638  break;
639 
640  default:
641  // No alpha, so blending not possible
642  return -1;
643  }
644 
645  for (y = topmost_y; y < bottommost_y; ++y) {
646  uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
647  uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
648  output_data = output->data[0] + output_linesize * (y - topmost_y);
649  for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
650  if (!memcmp(foreground, background, bpp)) {
651  if (input->format == AV_PIX_FMT_PAL8) {
652  if (transparent_palette_index == 256) {
653  // Need fully transparent colour, but none exists
654  return -1;
655  }
656 
657  *output_data = transparent_palette_index;
658  } else {
659  memset(output_data, 0, bpp);
660  }
661  continue;
662  }
663 
664  // Check for special alpha values, since full inverse
665  // alpha-on-alpha blending is rarely possible, and when
666  // possible, doesn't compress much better than
667  // APNG_BLEND_OP_SOURCE blending
668  switch (input->format) {
669  case AV_PIX_FMT_RGBA64BE:
670  if (((uint16_t*)foreground)[3] == 0xffff ||
671  ((uint16_t*)background)[3] == 0)
672  break;
673  return -1;
674 
675  case AV_PIX_FMT_YA16BE:
676  if (((uint16_t*)foreground)[1] == 0xffff ||
677  ((uint16_t*)background)[1] == 0)
678  break;
679  return -1;
680 
681  case AV_PIX_FMT_RGBA:
682  if (foreground[3] == 0xff || background[3] == 0)
683  break;
684  return -1;
685 
686  case AV_PIX_FMT_GRAY8A:
687  if (foreground[1] == 0xff || background[1] == 0)
688  break;
689  return -1;
690 
691  case AV_PIX_FMT_PAL8:
692  if (palette[*foreground] >> 24 == 0xff ||
693  palette[*background] >> 24 == 0)
694  break;
695  return -1;
696  }
697 
698  memmove(output_data, foreground, bpp);
699  }
700  }
701  }
702 
703  output->width = rightmost_x - leftmost_x;
704  output->height = bottommost_y - topmost_y;
705  fctl_chunk->width = output->width;
706  fctl_chunk->height = output->height;
707  fctl_chunk->x_offset = leftmost_x;
708  fctl_chunk->y_offset = topmost_y;
709 
710  return 0;
711 }
712 
713 static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
714  APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
715 {
716  PNGEncContext *s = avctx->priv_data;
717  int ret;
718  unsigned int y;
719  AVFrame* diffFrame;
720  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
721  uint8_t *original_bytestream, *original_bytestream_end;
722  uint8_t *temp_bytestream = 0, *temp_bytestream_end;
723  uint32_t best_sequence_number;
724  uint8_t *best_bytestream;
725  size_t best_bytestream_size = SIZE_MAX;
726  APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
727  APNGFctlChunk fctl_chunk = *best_fctl_chunk;
728 
729  if (avctx->frame_number == 0) {
730  best_fctl_chunk->width = pict->width;
731  best_fctl_chunk->height = pict->height;
732  best_fctl_chunk->x_offset = 0;
733  best_fctl_chunk->y_offset = 0;
734  best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
735  return encode_frame(avctx, pict);
736  }
737 
738  diffFrame = av_frame_alloc();
739  if (!diffFrame)
740  return AVERROR(ENOMEM);
741 
742  diffFrame->format = pict->format;
743  diffFrame->width = pict->width;
744  diffFrame->height = pict->height;
745  if ((ret = av_frame_get_buffer(diffFrame, 0)) < 0)
746  goto fail;
747 
748  original_bytestream = s->bytestream;
749  original_bytestream_end = s->bytestream_end;
750 
751  temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
752  if (!temp_bytestream) {
753  ret = AVERROR(ENOMEM);
754  goto fail;
755  }
756  temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
757 
758  for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
759  // 0: APNG_DISPOSE_OP_NONE
760  // 1: APNG_DISPOSE_OP_BACKGROUND
761  // 2: APNG_DISPOSE_OP_PREVIOUS
762 
763  for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
764  // 0: APNG_BLEND_OP_SOURCE
765  // 1: APNG_BLEND_OP_OVER
766 
767  uint32_t original_sequence_number = s->sequence_number, sequence_number;
768  uint8_t *bytestream_start = s->bytestream;
769  size_t bytestream_size;
770 
771  // Do disposal
772  if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
773  diffFrame->width = pict->width;
774  diffFrame->height = pict->height;
775  ret = av_frame_copy(diffFrame, s->last_frame);
776  if (ret < 0)
777  goto fail;
778 
779  if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
780  for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
781  size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
782  memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
783  }
784  }
785  } else {
786  if (!s->prev_frame)
787  continue;
788 
789  diffFrame->width = pict->width;
790  diffFrame->height = pict->height;
791  ret = av_frame_copy(diffFrame, s->prev_frame);
792  if (ret < 0)
793  goto fail;
794  }
795 
796  // Do inverse blending
797  if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
798  continue;
799 
800  // Do encoding
801  ret = encode_frame(avctx, diffFrame);
802  sequence_number = s->sequence_number;
803  s->sequence_number = original_sequence_number;
804  bytestream_size = s->bytestream - bytestream_start;
805  s->bytestream = bytestream_start;
806  if (ret < 0)
807  goto fail;
808 
809  if (bytestream_size < best_bytestream_size) {
810  *best_fctl_chunk = fctl_chunk;
811  *best_last_fctl_chunk = last_fctl_chunk;
812 
813  best_sequence_number = sequence_number;
814  best_bytestream = s->bytestream;
815  best_bytestream_size = bytestream_size;
816 
817  if (best_bytestream == original_bytestream) {
818  s->bytestream = temp_bytestream;
819  s->bytestream_end = temp_bytestream_end;
820  } else {
821  s->bytestream = original_bytestream;
822  s->bytestream_end = original_bytestream_end;
823  }
824  }
825  }
826  }
827 
828  s->sequence_number = best_sequence_number;
829  s->bytestream = original_bytestream + best_bytestream_size;
830  s->bytestream_end = original_bytestream_end;
831  if (best_bytestream != original_bytestream)
832  memcpy(original_bytestream, best_bytestream, best_bytestream_size);
833 
834  ret = 0;
835 
836 fail:
837  av_freep(&temp_bytestream);
838  av_frame_free(&diffFrame);
839  return ret;
840 }
841 
843  const AVFrame *pict, int *got_packet)
844 {
845  PNGEncContext *s = avctx->priv_data;
846  int ret;
847  int enc_row_size;
848  size_t max_packet_size;
849  APNGFctlChunk fctl_chunk = {0};
850 
851  if (pict && s->color_type == PNG_COLOR_TYPE_PALETTE) {
852  uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));
853 
854  if (avctx->frame_number == 0) {
855  s->palette_checksum = checksum;
856  } else if (checksum != s->palette_checksum) {
857  av_log(avctx, AV_LOG_ERROR,
858  "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
859  return -1;
860  }
861  }
862 
863  enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
864  max_packet_size =
865  AV_INPUT_BUFFER_MIN_SIZE + // headers
866  avctx->height * (
867  enc_row_size +
868  (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
869  );
870  if (max_packet_size > INT_MAX)
871  return AVERROR(ENOMEM);
872 
873  if (avctx->frame_number == 0) {
874  if (!pict)
875  return AVERROR(EINVAL);
876 
877  s->bytestream = s->extra_data = av_malloc(AV_INPUT_BUFFER_MIN_SIZE);
878  if (!s->extra_data)
879  return AVERROR(ENOMEM);
880 
881  ret = encode_headers(avctx, pict);
882  if (ret < 0)
883  return ret;
884 
885  s->extra_data_size = s->bytestream - s->extra_data;
886 
887  s->last_frame_packet = av_malloc(max_packet_size);
888  if (!s->last_frame_packet)
889  return AVERROR(ENOMEM);
890  } else if (s->last_frame) {
891  ret = ff_get_encode_buffer(avctx, pkt, s->last_frame_packet_size, 0);
892  if (ret < 0)
893  return ret;
894 
895  memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
896  pkt->pts = pkt->dts = s->last_frame->pts;
897  }
898 
899  if (pict) {
900  s->bytestream_start =
901  s->bytestream = s->last_frame_packet;
902  s->bytestream_end = s->bytestream + max_packet_size;
903 
904  // We're encoding the frame first, so we have to do a bit of shuffling around
905  // to have the image data write to the correct place in the buffer
906  fctl_chunk.sequence_number = s->sequence_number;
907  ++s->sequence_number;
908  s->bytestream += 26 + 12;
909 
910  ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
911  if (ret < 0)
912  return ret;
913 
914  fctl_chunk.delay_num = 0; // delay filled in during muxing
915  fctl_chunk.delay_den = 0;
916  } else {
917  s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
918  }
919 
920  if (s->last_frame) {
921  uint8_t* last_fctl_chunk_start = pkt->data;
922  uint8_t buf[26];
923  if (!s->extra_data_updated) {
924  uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
925  if (!side_data)
926  return AVERROR(ENOMEM);
927  memcpy(side_data, s->extra_data, s->extra_data_size);
928  s->extra_data_updated = 1;
929  }
930 
931  AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
932  AV_WB32(buf + 4, s->last_frame_fctl.width);
933  AV_WB32(buf + 8, s->last_frame_fctl.height);
934  AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
935  AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
936  AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
937  AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
938  buf[24] = s->last_frame_fctl.dispose_op;
939  buf[25] = s->last_frame_fctl.blend_op;
940  png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, 26);
941 
942  *got_packet = 1;
943  }
944 
945  if (pict) {
946  if (!s->last_frame) {
947  s->last_frame = av_frame_alloc();
948  if (!s->last_frame)
949  return AVERROR(ENOMEM);
950  } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
951  if (!s->prev_frame) {
952  s->prev_frame = av_frame_alloc();
953  if (!s->prev_frame)
954  return AVERROR(ENOMEM);
955 
956  s->prev_frame->format = pict->format;
957  s->prev_frame->width = pict->width;
958  s->prev_frame->height = pict->height;
959  if ((ret = av_frame_get_buffer(s->prev_frame, 0)) < 0)
960  return ret;
961  }
962 
963  // Do disposal, but not blending
964  av_frame_copy(s->prev_frame, s->last_frame);
965  if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
966  uint32_t y;
967  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
968  for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
969  size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
970  memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
971  }
972  }
973  }
974 
975  av_frame_unref(s->last_frame);
976  ret = av_frame_ref(s->last_frame, (AVFrame*)pict);
977  if (ret < 0)
978  return ret;
979 
980  s->last_frame_fctl = fctl_chunk;
981  s->last_frame_packet_size = s->bytestream - s->bytestream_start;
982  } else {
983  av_frame_free(&s->last_frame);
984  }
985 
986  return 0;
987 }
988 
990 {
991  PNGEncContext *s = avctx->priv_data;
992  int compression_level;
993 
994  switch (avctx->pix_fmt) {
995  case AV_PIX_FMT_RGBA:
996  avctx->bits_per_coded_sample = 32;
997  break;
998  case AV_PIX_FMT_RGB24:
999  avctx->bits_per_coded_sample = 24;
1000  break;
1001  case AV_PIX_FMT_GRAY8:
1002  avctx->bits_per_coded_sample = 0x28;
1003  break;
1004  case AV_PIX_FMT_MONOBLACK:
1005  avctx->bits_per_coded_sample = 1;
1006  break;
1007  case AV_PIX_FMT_PAL8:
1008  avctx->bits_per_coded_sample = 8;
1009  }
1010 
1011  ff_llvidencdsp_init(&s->llvidencdsp);
1012 
1013  if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
1014  s->filter_type = PNG_FILTER_VALUE_NONE;
1015 
1016  if (s->dpi && s->dpm) {
1017  av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
1018  return AVERROR(EINVAL);
1019  } else if (s->dpi) {
1020  s->dpm = s->dpi * 10000 / 254;
1021  }
1022 
1023  s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
1024  switch (avctx->pix_fmt) {
1025  case AV_PIX_FMT_RGBA64BE:
1026  s->bit_depth = 16;
1027  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1028  break;
1029  case AV_PIX_FMT_RGB48BE:
1030  s->bit_depth = 16;
1031  s->color_type = PNG_COLOR_TYPE_RGB;
1032  break;
1033  case AV_PIX_FMT_RGBA:
1034  s->bit_depth = 8;
1035  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1036  break;
1037  case AV_PIX_FMT_RGB24:
1038  s->bit_depth = 8;
1039  s->color_type = PNG_COLOR_TYPE_RGB;
1040  break;
1041  case AV_PIX_FMT_GRAY16BE:
1042  s->bit_depth = 16;
1043  s->color_type = PNG_COLOR_TYPE_GRAY;
1044  break;
1045  case AV_PIX_FMT_GRAY8:
1046  s->bit_depth = 8;
1047  s->color_type = PNG_COLOR_TYPE_GRAY;
1048  break;
1049  case AV_PIX_FMT_GRAY8A:
1050  s->bit_depth = 8;
1051  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1052  break;
1053  case AV_PIX_FMT_YA16BE:
1054  s->bit_depth = 16;
1055  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1056  break;
1057  case AV_PIX_FMT_MONOBLACK:
1058  s->bit_depth = 1;
1059  s->color_type = PNG_COLOR_TYPE_GRAY;
1060  break;
1061  case AV_PIX_FMT_PAL8:
1062  s->bit_depth = 8;
1063  s->color_type = PNG_COLOR_TYPE_PALETTE;
1064  break;
1065  default:
1066  return -1;
1067  }
1068  s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;
1069 
1070  s->zstream.zalloc = ff_png_zalloc;
1071  s->zstream.zfree = ff_png_zfree;
1072  s->zstream.opaque = NULL;
1073  compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
1074  ? Z_DEFAULT_COMPRESSION
1075  : av_clip(avctx->compression_level, 0, 9);
1076  if (deflateInit2(&s->zstream, compression_level, Z_DEFLATED, 15, 8, Z_DEFAULT_STRATEGY) != Z_OK)
1077  return -1;
1078 
1079  return 0;
1080 }
1081 
1083 {
1084  PNGEncContext *s = avctx->priv_data;
1085 
1086  deflateEnd(&s->zstream);
1087  av_frame_free(&s->last_frame);
1088  av_frame_free(&s->prev_frame);
1089  av_freep(&s->last_frame_packet);
1090  av_freep(&s->extra_data);
1091  s->extra_data_size = 0;
1092  return 0;
1093 }
1094 
1095 #define OFFSET(x) offsetof(PNGEncContext, x)
1096 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
1097 static const AVOption options[] = {
1098  {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1099  {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1100  { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, "pred" },
1101  { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE }, INT_MIN, INT_MAX, VE, "pred" },
1102  { "sub", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB }, INT_MIN, INT_MAX, VE, "pred" },
1103  { "up", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP }, INT_MIN, INT_MAX, VE, "pred" },
1104  { "avg", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG }, INT_MIN, INT_MAX, VE, "pred" },
1105  { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, "pred" },
1106  { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, "pred" },
1107  { NULL},
1108 };
1109 
1110 static const AVClass pngenc_class = {
1111  .class_name = "(A)PNG encoder",
1112  .item_name = av_default_item_name,
1113  .option = options,
1114  .version = LIBAVUTIL_VERSION_INT,
1115 };
1116 
1118  .name = "png",
1119  .long_name = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
1120  .type = AVMEDIA_TYPE_VIDEO,
1121  .id = AV_CODEC_ID_PNG,
1122  .priv_data_size = sizeof(PNGEncContext),
1123  .init = png_enc_init,
1124  .close = png_enc_close,
1125  .encode2 = encode_png,
1126  .capabilities = AV_CODEC_CAP_FRAME_THREADS,
1127  .pix_fmts = (const enum AVPixelFormat[]) {
1134  },
1135  .priv_class = &pngenc_class,
1136  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1137 };
1138 
1140  .name = "apng",
1141  .long_name = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
1142  .type = AVMEDIA_TYPE_VIDEO,
1143  .id = AV_CODEC_ID_APNG,
1144  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1145  .priv_data_size = sizeof(PNGEncContext),
1146  .init = png_enc_init,
1147  .close = png_enc_close,
1148  .encode2 = encode_apng,
1149  .pix_fmts = (const enum AVPixelFormat[]) {
1156  },
1157  .priv_class = &pngenc_class,
1158  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1159 };
AVFrame::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:534
AVCodec
AVCodec.
Definition: codec.h:202
encode_frame
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:432
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
FF_CODEC_CAP_INIT_THREADSAFE
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init functi...
Definition: internal.h:42
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
av_clip
#define av_clip
Definition: common.h:96
init
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:31
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
PNGEncContext::buf
uint8_t buf[IOBUF_SIZE]
Definition: pngenc.c:60
AV_WL32
#define AV_WL32(p, v)
Definition: intreadwrite.h:426
AVColorTransferCharacteristic
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:476
libm.h
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:246
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:617
AV_WB32_PNG
#define AV_WB32_PNG(buf, n)
Definition: pngenc.c:293
AVCRC
uint32_t AVCRC
Definition: crc.h:46
png_get_chrm
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
Definition: pngenc.c:294
ff_png_get_nb_channels
int ff_png_get_nb_channels(int color_type)
Definition: png.c:51
APNG_DISPOSE_OP_BACKGROUND
@ APNG_DISPOSE_OP_BACKGROUND
Definition: apng.h:32
PNGEncContext::bits_per_pixel
int bits_per_pixel
Definition: pngenc.c:67
PNGEncContext::last_frame
AVFrame * last_frame
Definition: pngenc.c:77
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:225
ff_apng_encoder
const AVCodec ff_apng_encoder
Definition: pngenc.c:1139
AVFrame::color_primaries
enum AVColorPrimaries color_primaries
Definition: frame.h:532
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:112
apng_encode_frame
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict, APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
Definition: pngenc.c:713
APNGFctlChunk::delay_num
uint16_t delay_num
Definition: pngenc.c:45
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:303
AV_PIX_FMT_RGBA64BE
@ AV_PIX_FMT_RGBA64BE
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
Definition: pixfmt.h:195
AVFrame::width
int width
Definition: frame.h:361
PNG_FILTER_VALUE_MIXED
#define PNG_FILTER_VALUE_MIXED
Definition: png.h:45
w
uint8_t w
Definition: llviddspenc.c:38
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:373
AVOption
AVOption.
Definition: opt.h:247
encode.h
b
#define b
Definition: input.c:40
data
const char data[16]
Definition: mxf.c:143
png_write_row
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
Definition: pngenc.c:272
output_data
static int output_data(MLPDecodeContext *m, unsigned int substr, AVFrame *frame, int *got_frame_ptr)
Write the audio data into the output buffer.
Definition: mlpdec.c:1068
PNGEncContext::dpm
int dpm
Physical pixel density, in dots per meter, if set.
Definition: pngenc.c:62
png_get_gama
static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
Definition: pngenc.c:336
PNGEncContext::last_frame_packet
uint8_t * last_frame_packet
Definition: pngenc.c:79
AVColorPrimaries
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:451
AV_CODEC_ID_APNG
@ AV_CODEC_ID_APNG
Definition: codec_id.h:264
FF_COMPRESSION_DEFAULT
#define FF_COMPRESSION_DEFAULT
Definition: avcodec.h:456
ff_png_zfree
void ff_png_zfree(void *opaque, void *ptr)
Definition: png.c:46
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:425
ff_png_encoder
const AVCodec ff_png_encoder
Definition: pngenc.c:1117
AV_WB64
#define AV_WB64(p, v)
Definition: intreadwrite.h:433
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:317
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:31
NB_PASSES
#define NB_PASSES
Definition: png.h:47
crc.h
AV_PIX_FMT_GRAY16BE
@ AV_PIX_FMT_GRAY16BE
Y , 16bpp, big-endian.
Definition: pixfmt.h:97
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:67
PNGEncContext::prev_frame
AVFrame * prev_frame
Definition: pngenc.c:76
AVCOL_TRC_IEC61966_2_1
@ AVCOL_TRC_IEC61966_2_1
IEC 61966-2-1 (sRGB or sYCC)
Definition: pixfmt.h:490
U
#define U(x)
Definition: vp56_arith.h:37
ff_png_pass_row_size
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
Definition: png.c:64
fail
#define fail()
Definition: checkasm.h:127
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:55
avpriv_get_gamma_from_trc
double avpriv_get_gamma_from_trc(enum AVColorTransferCharacteristic trc)
Determine a suitable 'gamma' value to match the supplied AVColorTransferCharacteristic.
Definition: color_utils.c:28
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:463
APNGFctlChunk::blend_op
uint8_t blend_op
Definition: pngenc.c:46
AVRational::num
int num
Numerator.
Definition: rational.h:59
encode_png
static int encode_png(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:525
PNG_COLOR_TYPE_RGB_ALPHA
#define PNG_COLOR_TYPE_RGB_ALPHA
Definition: png.h:36
AV_CODEC_FLAG_INTERLACED_DCT
#define AV_CODEC_FLAG_INTERLACED_DCT
Use interlaced DCT.
Definition: avcodec.h:260
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
av_bswap32
#define av_bswap32
Definition: bswap.h:33
color_utils.h
avassert.h
pkt
AVPacket * pkt
Definition: movenc.c:59
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
av_cold
#define av_cold
Definition: attributes.h:90
encode_apng
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:842
mask
static const uint16_t mask[17]
Definition: lzw.c:38
PNGEncContext::bytestream_end
uint8_t * bytestream_end
Definition: pngenc.c:55
width
#define width
stereo3d.h
s
#define s(width, name)
Definition: cbs_vp9.c:257
png_filter_row
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type, uint8_t *src, uint8_t *top, int size, int bpp)
Definition: pngenc.c:166
png_write_chunk
static void png_write_chunk(uint8_t **f, uint32_t tag, const uint8_t *buf, int length)
Definition: pngenc.c:225
PNG_COLOR_TYPE_RGB
#define PNG_COLOR_TYPE_RGB
Definition: png.h:35
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:290
AV_INPUT_BUFFER_MIN_SIZE
#define AV_INPUT_BUFFER_MIN_SIZE
Definition: avcodec.h:185
png_write_image_data
static void png_write_image_data(AVCodecContext *avctx, const uint8_t *buf, int length)
Definition: pngenc.c:244
APNG_DISPOSE_OP_PREVIOUS
@ APNG_DISPOSE_OP_PREVIOUS
Definition: apng.h:33
AVCOL_PRI_SMPTE240M
@ AVCOL_PRI_SMPTE240M
identical to above, also called "SMPTE C" even though it uses D65
Definition: pixfmt.h:460
APNG_DISPOSE_OP_NONE
@ APNG_DISPOSE_OP_NONE
Definition: apng.h:31
f
#define f(width, name)
Definition: cbs_vp9.c:255
pass
#define pass
Definition: fft_template.c:601
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
AVCOL_PRI_BT470BG
@ AVCOL_PRI_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:458
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:393
AVStereo3D::flags
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
AVCOL_PRI_SMPTE170M
@ AVCOL_PRI_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:459
AV_CODEC_ID_PNG
@ AV_CODEC_ID_PNG
Definition: codec_id.h:111
PNGEncContext
Definition: pngenc.c:49
APNGFctlChunk::y_offset
uint32_t y_offset
Definition: pngenc.c:44
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:113
AV_PIX_FMT_GRAY8A
@ AV_PIX_FMT_GRAY8A
alias for AV_PIX_FMT_YA8
Definition: pixfmt.h:136
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
APNGFctlChunk::delay_den
uint16_t delay_den
Definition: pngenc.c:45
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
apng.h
AV_WB16
#define AV_WB16(p, v)
Definition: intreadwrite.h:405
IOBUF_SIZE
#define IOBUF_SIZE
Definition: pngenc.c:39
AV_PIX_FMT_MONOBLACK
@ AV_PIX_FMT_MONOBLACK
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb.
Definition: pixfmt.h:76
AVCOL_PRI_BT709
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
Definition: pixfmt.h:453
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:235
apng_do_inverse_blend
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input, APNGFctlChunk *fctl_chunk, uint8_t bpp)
Definition: pngenc.c:570
APNGFctlChunk::width
uint32_t width
Definition: pngenc.c:43
src
#define src
Definition: vp8dsp.c:255
png_enc_close
static av_cold int png_enc_close(AVCodecContext *avctx)
Definition: pngenc.c:1082
PNG_COLOR_TYPE_GRAY
#define PNG_COLOR_TYPE_GRAY
Definition: png.h:33
deflate
static void deflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
Definition: vf_neighbor.c:160
PNGEncContext::filter_type
int filter_type
Definition: pngenc.c:57
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
PNGEncContext::extra_data_updated
int extra_data_updated
Definition: pngenc.c:72
APNGFctlChunk
Definition: pngenc.c:41
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
ff_png_pass_ymask
const uint8_t ff_png_pass_ymask[NB_PASSES]
Definition: png.c:27
ff_llvidencdsp_init
av_cold void ff_llvidencdsp_init(LLVidEncDSPContext *c)
Definition: lossless_videoencdsp.c:91
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
AVCOL_PRI_BT2020
@ AVCOL_PRI_BT2020
ITU-R BT2020.
Definition: pixfmt.h:462
APNGFctlChunk::sequence_number
uint32_t sequence_number
Definition: pngenc.c:42
AV_WB32
#define AV_WB32(p, v)
Definition: intreadwrite.h:419
PNG_FILTER_VALUE_NONE
#define PNG_FILTER_VALUE_NONE
Definition: png.h:40
AV_PIX_FMT_RGB24
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:68
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AVPacket::size
int size
Definition: packet.h:374
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:327
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:678
AV_PIX_FMT_YA16BE
@ AV_PIX_FMT_YA16BE
16 bits gray, 16 bits alpha (big-endian)
Definition: pixfmt.h:202
PNGEncContext::last_frame_packet_size
size_t last_frame_packet_size
Definition: pngenc.c:80
PNG_FILTER_VALUE_AVG
#define PNG_FILTER_VALUE_AVG
Definition: png.h:43
size
int size
Definition: twinvq_data.h:10344
MKBETAG
#define MKBETAG(a, b, c, d)
Definition: macros.h:56
PNGEncContext::llvidencdsp
LLVidEncDSPContext llvidencdsp
Definition: pngenc.c:51
AVFrameSideData::data
uint8_t * data
Definition: frame.h:211
PNG_FILTER_VALUE_PAETH
#define PNG_FILTER_VALUE_PAETH
Definition: png.h:44
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:376
PNGEncContext::extra_data
uint8_t * extra_data
Definition: pngenc.c:73
PNG_FILTER_VALUE_UP
#define PNG_FILTER_VALUE_UP
Definition: png.h:42
png_choose_filter
static uint8_t * png_choose_filter(PNGEncContext *s, uint8_t *dst, uint8_t *src, uint8_t *top, int size, int bpp)
Definition: pngenc.c:195
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:372
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
av_crc_get_table
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
Definition: crc.c:374
OFFSET
#define OFFSET(x)
Definition: pngenc.c:1095
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:379
AV_STEREO3D_FLAG_INVERT
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
input
and forward the test the status of outputs and forward it to the corresponding return FFERROR_NOT_READY If the filters stores internally one or a few frame for some input
Definition: filter_design.txt:172
PNGSIG
#define PNGSIG
Definition: png.h:49
input_data
static void input_data(MLPEncodeContext *ctx, void *samples)
Wrapper function for inputting data in two different bit-depths.
Definition: mlpenc.c:1214
lossless_videoencdsp.h
src1
#define src1
Definition: h264pred.c:140
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1418
PNG_FILTER_VALUE_SUB
#define PNG_FILTER_VALUE_SUB
Definition: png.h:41
AV_PIX_FMT_RGB48BE
@ AV_PIX_FMT_RGB48BE
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:102
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:271
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:366
options
static const AVOption options[]
Definition: pngenc.c:1097
AV_FRAME_DATA_STEREO3D
@ AV_FRAME_DATA_STEREO3D
Stereoscopic 3d metadata.
Definition: frame.h:63
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:437
ff_png_zalloc
void * ff_png_zalloc(void *opaque, unsigned int items, unsigned int size)
Definition: png.c:41
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:209
len
int len
Definition: vorbis_enc_data.h:426
AVCodecContext::height
int height
Definition: avcodec.h:556
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:593
LLVidEncDSPContext
Definition: lossless_videoencdsp.h:25
sub_left_prediction
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
Definition: pngenc.c:150
PNGEncContext::color_type
int color_type
Definition: pngenc.c:66
avcodec.h
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
AVCOL_PRI_BT470M
@ AVCOL_PRI_BT470M
also FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:456
tag
uint32_t tag
Definition: movenc.c:1595
ret
ret
Definition: filter_design.txt:187
pred
static const float pred[4]
Definition: siprdata.h:259
PNGEncContext::extra_data_size
int extra_data_size
Definition: pngenc.c:74
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:71
AVStereo3D::type
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
PNGEncContext::bit_depth
int bit_depth
Definition: pngenc.c:65
checksum
static volatile int checksum
Definition: adler32.c:30
PNGEncContext::bytestream_start
uint8_t * bytestream_start
Definition: pngenc.c:54
AVCodecContext
main external API structure.
Definition: avcodec.h:383
AVFrame::height
int height
Definition: frame.h:361
av_packet_new_side_data
uint8_t * av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, size_t size)
Allocate new information of a packet.
Definition: avpacket.c:225
ff_get_encode_buffer
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
Definition: encode.c:78
av_crc
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
Definition: crc.c:392
AV_PKT_DATA_NEW_EXTRADATA
@ AV_PKT_DATA_NEW_EXTRADATA
The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was...
Definition: packet.h:55
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:224
png_get_interlaced_row
static void png_get_interlaced_row(uint8_t *dst, int row_size, int bits_per_pixel, int pass, const uint8_t *src, int width)
Definition: pngenc.c:83
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:82
APNG_BLEND_OP_SOURCE
@ APNG_BLEND_OP_SOURCE
Definition: apng.h:37
AV_CRC_32_IEEE_LE
@ AV_CRC_32_IEEE_LE
Definition: crc.h:53
PNGEncContext::last_frame_fctl
APNGFctlChunk last_frame_fctl
Definition: pngenc.c:78
PNGEncContext::dpi
int dpi
Physical pixel density, in dots per inch, if set.
Definition: pngenc.c:61
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AVCodecContext::frame_number
int frame_number
Frame counter, set by libavcodec.
Definition: avcodec.h:1023
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:209
png_enc_init
static av_cold int png_enc_init(AVCodecContext *avctx)
Definition: pngenc.c:989
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
AVPacket
This structure stores compressed data.
Definition: packet.h:350
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:410
png.h
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
d
d
Definition: ffmpeg_filter.c:156
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:556
bytestream.h
sub_png_paeth_prediction
static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp)
Definition: pngenc.c:122
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:334
PNG_COLOR_TYPE_GRAY_ALPHA
#define PNG_COLOR_TYPE_GRAY_ALPHA
Definition: png.h:37
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
APNGFctlChunk::height
uint32_t height
Definition: pngenc.c:43
MKTAG
#define MKTAG(a, b, c, d)
Definition: macros.h:55
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:176
PNGEncContext::bytestream
uint8_t * bytestream
Definition: pngenc.c:53
PNGEncContext::is_progressive
int is_progressive
Definition: pngenc.c:64
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:233
VE
#define VE
Definition: pngenc.c:1096
ff_alloc_packet
int ff_alloc_packet(AVCodecContext *avctx, AVPacket *avpkt, int64_t size)
Check AVPacket size and allocate data.
Definition: encode.c:34
encode_headers
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:346
APNGFctlChunk::dispose_op
uint8_t dispose_op
Definition: pngenc.c:46
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:753
PNGEncContext::palette_checksum
uint32_t palette_checksum
Definition: pngenc.c:70
PNG_COLOR_TYPE_PALETTE
#define PNG_COLOR_TYPE_PALETTE
Definition: png.h:34
APNGFctlChunk::x_offset
uint32_t x_offset
Definition: pngenc.c:44
PNGEncContext::sequence_number
uint32_t sequence_number
Definition: pngenc.c:71
AVCodecContext::compression_level
int compression_level
Definition: avcodec.h:455
pngenc_class
static const AVClass pngenc_class
Definition: pngenc.c:1110
PNGEncContext::zstream
z_stream zstream
Definition: pngenc.c:59