FFmpeg
pngenc.c
Go to the documentation of this file.
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include "avcodec.h"
23 #include "internal.h"
24 #include "bytestream.h"
25 #include "lossless_videoencdsp.h"
26 #include "png.h"
27 #include "apng.h"
28 
29 #include "libavutil/avassert.h"
30 #include "libavutil/crc.h"
31 #include "libavutil/libm.h"
32 #include "libavutil/opt.h"
33 #include "libavutil/color_utils.h"
34 #include "libavutil/stereo3d.h"
35 
36 #include <zlib.h>
37 
38 #define IOBUF_SIZE 4096
39 
40 typedef struct APNGFctlChunk {
41  uint32_t sequence_number;
42  uint32_t width, height;
43  uint32_t x_offset, y_offset;
44  uint16_t delay_num, delay_den;
45  uint8_t dispose_op, blend_op;
47 
48 typedef struct PNGEncContext {
49  AVClass *class;
51 
52  uint8_t *bytestream;
53  uint8_t *bytestream_start;
54  uint8_t *bytestream_end;
55 
57 
58  z_stream zstream;
59  uint8_t buf[IOBUF_SIZE];
60  int dpi; ///< Physical pixel density, in dots per inch, if set
61  int dpm; ///< Physical pixel density, in dots per meter, if set
62 
64  int bit_depth;
67 
68  // APNG
69  uint32_t palette_checksum; // Used to ensure a single unique palette
70  uint32_t sequence_number;
72  uint8_t *extra_data;
74 
81 
/**
 * Extract from one full-width scanline the pixels that belong to the given
 * Adam7 interlace pass, packing them contiguously into dst.
 */
static void png_get_interlaced_row(uint8_t *dst, int row_size,
                                   int bits_per_pixel, int pass,
                                   const uint8_t *src, int width)
{
    /* Per-pass column masks: bit (7 - (x & 7)) is set when column x
     * participates in the pass. */
    static const int masks[] = { 0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff };
    int pass_mask = masks[pass];
    int x;

    if (bits_per_pixel == 1) {
        /* Bit-packed monochrome: gather selected bits one at a time. */
        int out_bit = 0;
        memset(dst, 0, row_size);
        for (x = 0; x < width; x++) {
            int phase = x & 7;
            if ((pass_mask << phase) & 0x80) {
                int bit = (src[x >> 3] >> (7 - phase)) & 1;
                dst[out_bit >> 3] |= bit << (7 - (out_bit & 7));
                out_bit++;
            }
        }
    } else {
        /* Byte-aligned pixels: copy each selected pixel verbatim. */
        int pixel_bytes = bits_per_pixel >> 3;
        uint8_t *out = dst;
        const uint8_t *in = src;
        for (x = 0; x < width; x++) {
            if ((pass_mask << (x & 7)) & 0x80) {
                memcpy(out, in, pixel_bytes);
                out += pixel_bytes;
            }
            in += pixel_bytes;
        }
    }
}
120 
/**
 * Apply the PNG Paeth filter: for each byte, subtract whichever of left (a),
 * up (b) or up-left (c) is closest to the gradient estimate p = a + b - c.
 * src/top must have bpp valid bytes before them (the previous pixel).
 */
static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
                                     int w, int bpp)
{
    for (int i = 0; i < w; i++) {
        int left   = src[i - bpp];
        int up     = top[i];
        int upleft = top[i - bpp];
        int pred;

        /* |p - a| = |b - c|, |p - b| = |a - c|, |p - c| = |a + b - 2c| */
        int db = up - upleft;
        int da = left - upleft;
        int pa = abs(db);
        int pb = abs(da);
        int pc = abs(db + da);

        /* Ties resolve in the order left, up, up-left (per the PNG spec). */
        if (pa <= pb && pa <= pc)
            pred = left;
        else if (pb <= pc)
            pred = up;
        else
            pred = upleft;

        dst[i] = src[i] - pred;
    }
}
148 
149 static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
150 {
151  const uint8_t *src1 = src + bpp;
152  const uint8_t *src2 = src;
153  int x, unaligned_w;
154 
155  memcpy(dst, src, bpp);
156  dst += bpp;
157  size -= bpp;
158  unaligned_w = FFMIN(32 - bpp, size);
159  for (x = 0; x < unaligned_w; x++)
160  *dst++ = *src1++ - *src2++;
161  size -= unaligned_w;
162  c->llvidencdsp.diff_bytes(dst, src1, src2, size);
163 }
164 
165 static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
166  uint8_t *src, uint8_t *top, int size, int bpp)
167 {
168  int i;
169 
170  switch (filter_type) {
172  memcpy(dst, src, size);
173  break;
175  sub_left_prediction(c, dst, src, bpp, size);
176  break;
177  case PNG_FILTER_VALUE_UP:
178  c->llvidencdsp.diff_bytes(dst, src, top, size);
179  break;
181  for (i = 0; i < bpp; i++)
182  dst[i] = src[i] - (top[i] >> 1);
183  for (; i < size; i++)
184  dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
185  break;
187  for (i = 0; i < bpp; i++)
188  dst[i] = src[i] - top[i];
189  sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
190  break;
191  }
192 }
193 
194 static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
195  uint8_t *src, uint8_t *top, int size, int bpp)
196 {
197  int pred = s->filter_type;
198  av_assert0(bpp || !pred);
199  if (!top && pred)
201  if (pred == PNG_FILTER_VALUE_MIXED) {
202  int i;
203  int cost, bcost = INT_MAX;
204  uint8_t *buf1 = dst, *buf2 = dst + size + 16;
205  for (pred = 0; pred < 5; pred++) {
206  png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
207  buf1[0] = pred;
208  cost = 0;
209  for (i = 0; i <= size; i++)
210  cost += abs((int8_t) buf1[i]);
211  if (cost < bcost) {
212  bcost = cost;
213  FFSWAP(uint8_t *, buf1, buf2);
214  }
215  }
216  return buf2;
217  } else {
218  png_filter_row(s, dst + 1, pred, src, top, size, bpp);
219  dst[0] = pred;
220  return dst;
221  }
222 }
223 
224 static void png_write_chunk(uint8_t **f, uint32_t tag,
225  const uint8_t *buf, int length)
226 {
227  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
228  uint32_t crc = ~0U;
229  uint8_t tagbuf[4];
230 
231  bytestream_put_be32(f, length);
232  AV_WL32(tagbuf, tag);
233  crc = av_crc(crc_table, crc, tagbuf, 4);
234  bytestream_put_be32(f, av_bswap32(tag));
235  if (length > 0) {
236  crc = av_crc(crc_table, crc, buf, length);
237  memcpy(*f, buf, length);
238  *f += length;
239  }
240  bytestream_put_be32(f, ~crc);
241 }
242 
244  const uint8_t *buf, int length)
245 {
246  PNGEncContext *s = avctx->priv_data;
247  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
248  uint32_t crc = ~0U;
249 
250  if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_number == 0) {
251  png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
252  return;
253  }
254 
255  bytestream_put_be32(&s->bytestream, length + 4);
256 
257  bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
258  bytestream_put_be32(&s->bytestream, s->sequence_number);
259  crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
260 
261  crc = av_crc(crc_table, crc, buf, length);
262  memcpy(s->bytestream, buf, length);
263  s->bytestream += length;
264 
265  bytestream_put_be32(&s->bytestream, ~crc);
266 
267  ++s->sequence_number;
268 }
269 
270 /* XXX: do filtering */
271 static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
272 {
273  PNGEncContext *s = avctx->priv_data;
274  int ret;
275 
276  s->zstream.avail_in = size;
277  s->zstream.next_in = data;
278  while (s->zstream.avail_in > 0) {
279  ret = deflate(&s->zstream, Z_NO_FLUSH);
280  if (ret != Z_OK)
281  return -1;
282  if (s->zstream.avail_out == 0) {
283  if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
284  png_write_image_data(avctx, s->buf, IOBUF_SIZE);
285  s->zstream.avail_out = IOBUF_SIZE;
286  s->zstream.next_out = s->buf;
287  }
288  }
289  return 0;
290 }
291 
292 #define AV_WB32_PNG(buf, n) AV_WB32(buf, lrint((n) * 100000))
293 static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
294 {
295  double rx, ry, gx, gy, bx, by, wx = 0.3127, wy = 0.3290;
296  switch (prim) {
297  case AVCOL_PRI_BT709:
298  rx = 0.640; ry = 0.330;
299  gx = 0.300; gy = 0.600;
300  bx = 0.150; by = 0.060;
301  break;
302  case AVCOL_PRI_BT470M:
303  rx = 0.670; ry = 0.330;
304  gx = 0.210; gy = 0.710;
305  bx = 0.140; by = 0.080;
306  wx = 0.310; wy = 0.316;
307  break;
308  case AVCOL_PRI_BT470BG:
309  rx = 0.640; ry = 0.330;
310  gx = 0.290; gy = 0.600;
311  bx = 0.150; by = 0.060;
312  break;
313  case AVCOL_PRI_SMPTE170M:
314  case AVCOL_PRI_SMPTE240M:
315  rx = 0.630; ry = 0.340;
316  gx = 0.310; gy = 0.595;
317  bx = 0.155; by = 0.070;
318  break;
319  case AVCOL_PRI_BT2020:
320  rx = 0.708; ry = 0.292;
321  gx = 0.170; gy = 0.797;
322  bx = 0.131; by = 0.046;
323  break;
324  default:
325  return 0;
326  }
327 
328  AV_WB32_PNG(buf , wx); AV_WB32_PNG(buf + 4 , wy);
329  AV_WB32_PNG(buf + 8 , rx); AV_WB32_PNG(buf + 12, ry);
330  AV_WB32_PNG(buf + 16, gx); AV_WB32_PNG(buf + 20, gy);
331  AV_WB32_PNG(buf + 24, bx); AV_WB32_PNG(buf + 28, by);
332  return 1;
333 }
334 
335 static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
336 {
337  double gamma = avpriv_get_gamma_from_trc(trc);
338  if (gamma <= 1e-6)
339  return 0;
340 
341  AV_WB32_PNG(buf, 1.0 / gamma);
342  return 1;
343 }
344 
345 static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
346 {
347  AVFrameSideData *side_data;
348  PNGEncContext *s = avctx->priv_data;
349 
350  /* write png header */
351  AV_WB32(s->buf, avctx->width);
352  AV_WB32(s->buf + 4, avctx->height);
353  s->buf[8] = s->bit_depth;
354  s->buf[9] = s->color_type;
355  s->buf[10] = 0; /* compression type */
356  s->buf[11] = 0; /* filter type */
357  s->buf[12] = s->is_progressive; /* interlace type */
358  png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);
359 
360  /* write physical information */
361  if (s->dpm) {
362  AV_WB32(s->buf, s->dpm);
363  AV_WB32(s->buf + 4, s->dpm);
364  s->buf[8] = 1; /* unit specifier is meter */
365  } else {
366  AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
367  AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
368  s->buf[8] = 0; /* unit specifier is unknown */
369  }
370  png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);
371 
372  /* write stereoscopic information */
374  if (side_data) {
375  AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
376  switch (stereo3d->type) {
378  s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
379  png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
380  break;
381  case AV_STEREO3D_2D:
382  break;
383  default:
384  av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
385  break;
386  }
387  }
388 
389  /* write colorspace information */
390  if (pict->color_primaries == AVCOL_PRI_BT709 &&
392  s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
393  png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
394  }
395 
396  if (png_get_chrm(pict->color_primaries, s->buf))
397  png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
398  if (png_get_gama(pict->color_trc, s->buf))
399  png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);
400 
401  /* put the palette if needed */
402  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
403  int has_alpha, alpha, i;
404  unsigned int v;
405  uint32_t *palette;
406  uint8_t *ptr, *alpha_ptr;
407 
408  palette = (uint32_t *)pict->data[1];
409  ptr = s->buf;
410  alpha_ptr = s->buf + 256 * 3;
411  has_alpha = 0;
412  for (i = 0; i < 256; i++) {
413  v = palette[i];
414  alpha = v >> 24;
415  if (alpha != 0xff)
416  has_alpha = 1;
417  *alpha_ptr++ = alpha;
418  bytestream_put_be24(&ptr, v);
419  }
420  png_write_chunk(&s->bytestream,
421  MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
422  if (has_alpha) {
423  png_write_chunk(&s->bytestream,
424  MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
425  }
426  }
427 
428  return 0;
429 }
430 
/**
 * Filter and deflate all scanlines of pict into s->bytestream (via
 * png_write_image_data), either sequentially or in Adam7 interlace order.
 * Resets the deflate stream on exit so the context can encode again.
 * @return 0 on success, AVERROR(ENOMEM) or -1 (zlib failure) otherwise.
 */
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
{
    PNGEncContext *s = avctx->priv_data;
    const AVFrame *const p = pict;
    int y, len, ret;
    int row_size, pass_row_size;
    uint8_t *ptr, *top, *crow_buf, *crow;
    uint8_t *crow_base = NULL;
    uint8_t *progressive_buf = NULL;
    uint8_t *top_buf = NULL;

    /* Bytes per full-width scanline, rounded up to whole bytes. */
    row_size = (pict->width * s->bits_per_pixel + 7) >> 3;

    /* MIXED filtering needs room for two candidate rows (see
     * png_choose_filter), hence the doubled allocation. */
    crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
    if (!crow_base) {
        ret = AVERROR(ENOMEM);
        goto the_end;
    }
    // pixel data should be aligned, but there's a control byte before it
    crow_buf = crow_base + 15;
    if (s->is_progressive) {
        /* Scratch rows for the current and previous interlace-pass rows. */
        progressive_buf = av_malloc(row_size + 1);
        top_buf = av_malloc(row_size + 1);
        if (!progressive_buf || !top_buf) {
            ret = AVERROR(ENOMEM);
            goto the_end;
        }
    }

    /* put each row */
    s->zstream.avail_out = IOBUF_SIZE;
    s->zstream.next_out = s->buf;
    if (s->is_progressive) {
        int pass;

        for (pass = 0; pass < NB_PASSES; pass++) {
            /* NOTE: a pass is completely omitted if no pixels would be
             * output */
            pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
            if (pass_row_size > 0) {
                top = NULL;
                for (y = 0; y < pict->height; y++)
                    /* Only rows that belong to this pass. */
                    if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
                        ptr = p->data[0] + y * p->linesize[0];
                        /* Swap so the previous pass row becomes 'top'. */
                        FFSWAP(uint8_t *, progressive_buf, top_buf);
                        png_get_interlaced_row(progressive_buf, pass_row_size,
                                               s->bits_per_pixel, pass,
                                               ptr, pict->width);
                        crow = png_choose_filter(s, crow_buf, progressive_buf,
                                                 top, pass_row_size, s->bits_per_pixel >> 3);
                        /* +1: the filter-type byte precedes the row. */
                        png_write_row(avctx, crow, pass_row_size + 1);
                        top = progressive_buf;
                    }
            }
        }
    } else {
        top = NULL;
        for (y = 0; y < pict->height; y++) {
            ptr = p->data[0] + y * p->linesize[0];
            crow = png_choose_filter(s, crow_buf, ptr, top,
                                     row_size, s->bits_per_pixel >> 3);
            png_write_row(avctx, crow, row_size + 1);
            top = ptr;
        }
    }
    /* compress last bytes */
    for (;;) {
        ret = deflate(&s->zstream, Z_FINISH);
        if (ret == Z_OK || ret == Z_STREAM_END) {
            len = IOBUF_SIZE - s->zstream.avail_out;
            /* Flush whatever deflate produced, if the packet has room. */
            if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
                png_write_image_data(avctx, s->buf, len);
            }
            s->zstream.avail_out = IOBUF_SIZE;
            s->zstream.next_out = s->buf;
            if (ret == Z_STREAM_END)
                break;
        } else {
            ret = -1;
            goto the_end;
        }
    }

    ret = 0;

the_end:
    av_freep(&crow_base);
    av_freep(&progressive_buf);
    av_freep(&top_buf);
    /* Leave the zstream ready for the next frame. */
    deflateReset(&s->zstream);
    return ret;
}
523 
524 static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
525  const AVFrame *pict, int *got_packet)
526 {
527  PNGEncContext *s = avctx->priv_data;
528  int ret;
529  int enc_row_size;
530  size_t max_packet_size;
531 
532  enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
533  max_packet_size =
534  AV_INPUT_BUFFER_MIN_SIZE + // headers
535  avctx->height * (
536  enc_row_size +
537  12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
538  );
539  if (max_packet_size > INT_MAX)
540  return AVERROR(ENOMEM);
541  ret = ff_alloc_packet2(avctx, pkt, max_packet_size, 0);
542  if (ret < 0)
543  return ret;
544 
545  s->bytestream_start =
546  s->bytestream = pkt->data;
547  s->bytestream_end = pkt->data + pkt->size;
548 
549  AV_WB64(s->bytestream, PNGSIG);
550  s->bytestream += 8;
551 
552  ret = encode_headers(avctx, pict);
553  if (ret < 0)
554  return ret;
555 
556  ret = encode_frame(avctx, pict);
557  if (ret < 0)
558  return ret;
559 
560  png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);
561 
562  pkt->size = s->bytestream - s->bytestream_start;
564  *got_packet = 1;
565 
566  return 0;
567 }
568 
570  APNGFctlChunk *fctl_chunk, uint8_t bpp)
571 {
572  // output: background, input: foreground
573  // output the image such that when blended with the background, will produce the foreground
574 
575  unsigned int x, y;
576  unsigned int leftmost_x = input->width;
577  unsigned int rightmost_x = 0;
578  unsigned int topmost_y = input->height;
579  unsigned int bottommost_y = 0;
580  const uint8_t *input_data = input->data[0];
581  uint8_t *output_data = output->data[0];
582  ptrdiff_t input_linesize = input->linesize[0];
583  ptrdiff_t output_linesize = output->linesize[0];
584 
585  // Find bounding box of changes
586  for (y = 0; y < input->height; ++y) {
587  for (x = 0; x < input->width; ++x) {
588  if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
589  continue;
590 
591  if (x < leftmost_x)
592  leftmost_x = x;
593  if (x >= rightmost_x)
594  rightmost_x = x + 1;
595  if (y < topmost_y)
596  topmost_y = y;
597  if (y >= bottommost_y)
598  bottommost_y = y + 1;
599  }
600 
601  input_data += input_linesize;
602  output_data += output_linesize;
603  }
604 
605  if (leftmost_x == input->width && rightmost_x == 0) {
606  // Empty frame
607  // APNG does not support empty frames, so we make it a 1x1 frame
608  leftmost_x = topmost_y = 0;
609  rightmost_x = bottommost_y = 1;
610  }
611 
612  // Do actual inverse blending
613  if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
614  output_data = output->data[0];
615  for (y = topmost_y; y < bottommost_y; ++y) {
616  memcpy(output_data,
617  input->data[0] + input_linesize * y + bpp * leftmost_x,
618  bpp * (rightmost_x - leftmost_x));
619  output_data += output_linesize;
620  }
621  } else { // APNG_BLEND_OP_OVER
622  size_t transparent_palette_index;
623  uint32_t *palette;
624 
625  switch (input->format) {
626  case AV_PIX_FMT_RGBA64BE:
627  case AV_PIX_FMT_YA16BE:
628  case AV_PIX_FMT_RGBA:
629  case AV_PIX_FMT_GRAY8A:
630  break;
631 
632  case AV_PIX_FMT_PAL8:
633  palette = (uint32_t*)input->data[1];
634  for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
635  if (palette[transparent_palette_index] >> 24 == 0)
636  break;
637  break;
638 
639  default:
640  // No alpha, so blending not possible
641  return -1;
642  }
643 
644  for (y = topmost_y; y < bottommost_y; ++y) {
645  uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
646  uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
647  output_data = output->data[0] + output_linesize * (y - topmost_y);
648  for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
649  if (!memcmp(foreground, background, bpp)) {
650  if (input->format == AV_PIX_FMT_PAL8) {
651  if (transparent_palette_index == 256) {
652  // Need fully transparent colour, but none exists
653  return -1;
654  }
655 
656  *output_data = transparent_palette_index;
657  } else {
658  memset(output_data, 0, bpp);
659  }
660  continue;
661  }
662 
663  // Check for special alpha values, since full inverse
664  // alpha-on-alpha blending is rarely possible, and when
665  // possible, doesn't compress much better than
666  // APNG_BLEND_OP_SOURCE blending
667  switch (input->format) {
668  case AV_PIX_FMT_RGBA64BE:
669  if (((uint16_t*)foreground)[3] == 0xffff ||
670  ((uint16_t*)background)[3] == 0)
671  break;
672  return -1;
673 
674  case AV_PIX_FMT_YA16BE:
675  if (((uint16_t*)foreground)[1] == 0xffff ||
676  ((uint16_t*)background)[1] == 0)
677  break;
678  return -1;
679 
680  case AV_PIX_FMT_RGBA:
681  if (foreground[3] == 0xff || background[3] == 0)
682  break;
683  return -1;
684 
685  case AV_PIX_FMT_GRAY8A:
686  if (foreground[1] == 0xff || background[1] == 0)
687  break;
688  return -1;
689 
690  case AV_PIX_FMT_PAL8:
691  if (palette[*foreground] >> 24 == 0xff ||
692  palette[*background] >> 24 == 0)
693  break;
694  return -1;
695  }
696 
697  memmove(output_data, foreground, bpp);
698  }
699  }
700  }
701 
702  output->width = rightmost_x - leftmost_x;
703  output->height = bottommost_y - topmost_y;
704  fctl_chunk->width = output->width;
705  fctl_chunk->height = output->height;
706  fctl_chunk->x_offset = leftmost_x;
707  fctl_chunk->y_offset = topmost_y;
708 
709  return 0;
710 }
711 
/**
 * Encode one APNG frame, trying every combination of the previous frame's
 * dispose_op and this frame's blend_op and keeping the smallest encoding.
 * Two output buffers (the caller's and a temporary) are ping-ponged so the
 * best encoding so far is always preserved while the next candidate is
 * written to the other buffer; the winner is copied back at the end.
 * On return *best_fctl_chunk/*best_last_fctl_chunk describe the winner.
 * @return 0 on success, a negative AVERROR on failure.
 */
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
                             APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    unsigned int y;
    AVFrame* diffFrame;
    uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
    uint8_t *original_bytestream, *original_bytestream_end;
    uint8_t *temp_bytestream = 0, *temp_bytestream_end;
    uint32_t best_sequence_number;
    uint8_t *best_bytestream;
    size_t best_bytestream_size = SIZE_MAX;
    APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
    APNGFctlChunk fctl_chunk = *best_fctl_chunk;

    /* First frame: no previous frame to diff against, encode it whole. */
    if (avctx->frame_number == 0) {
        best_fctl_chunk->width = pict->width;
        best_fctl_chunk->height = pict->height;
        best_fctl_chunk->x_offset = 0;
        best_fctl_chunk->y_offset = 0;
        best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
        return encode_frame(avctx, pict);
    }

    diffFrame = av_frame_alloc();
    if (!diffFrame)
        return AVERROR(ENOMEM);

    diffFrame->format = pict->format;
    diffFrame->width = pict->width;
    diffFrame->height = pict->height;
    if ((ret = av_frame_get_buffer(diffFrame, 0)) < 0)
        goto fail;

    original_bytestream = s->bytestream;
    original_bytestream_end = s->bytestream_end;

    /* Second output buffer, same capacity as the caller's remaining space. */
    temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
    if (!temp_bytestream) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);

    /* NOTE(review): at least one combination (DISPOSE_OP_NONE +
     * BLEND_OP_SOURCE) always succeeds, so best_sequence_number and
     * best_bytestream are set before they are read below — presumably by
     * design; verify if the continue paths ever change. */
    for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
        // 0: APNG_DISPOSE_OP_NONE
        // 1: APNG_DISPOSE_OP_BACKGROUND
        // 2: APNG_DISPOSE_OP_PREVIOUS

        for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
            // 0: APNG_BLEND_OP_SOURCE
            // 1: APNG_BLEND_OP_OVER

            uint32_t original_sequence_number = s->sequence_number, sequence_number;
            uint8_t *bytestream_start = s->bytestream;
            size_t bytestream_size;

            // Do disposal
            if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
                /* Reset geometry: apng_do_inverse_blend may have cropped it. */
                diffFrame->width = pict->width;
                diffFrame->height = pict->height;
                ret = av_frame_copy(diffFrame, s->last_frame);
                if (ret < 0)
                    goto fail;

                if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
                    /* Clear the previous frame's region to transparent black. */
                    for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
                        size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
                        memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
                    }
                }
            } else {
                /* DISPOSE_OP_PREVIOUS needs the frame before last. */
                if (!s->prev_frame)
                    continue;

                diffFrame->width = pict->width;
                diffFrame->height = pict->height;
                ret = av_frame_copy(diffFrame, s->prev_frame);
                if (ret < 0)
                    goto fail;
            }

            // Do inverse blending
            if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
                continue;

            // Do encoding
            ret = encode_frame(avctx, diffFrame);
            /* Roll back stream state so every candidate starts identically. */
            sequence_number = s->sequence_number;
            s->sequence_number = original_sequence_number;
            bytestream_size = s->bytestream - bytestream_start;
            s->bytestream = bytestream_start;
            if (ret < 0)
                goto fail;

            if (bytestream_size < best_bytestream_size) {
                *best_fctl_chunk = fctl_chunk;
                *best_last_fctl_chunk = last_fctl_chunk;

                best_sequence_number = sequence_number;
                best_bytestream = s->bytestream;
                best_bytestream_size = bytestream_size;

                /* Swap buffers: keep the winner, overwrite the other next. */
                if (best_bytestream == original_bytestream) {
                    s->bytestream = temp_bytestream;
                    s->bytestream_end = temp_bytestream_end;
                } else {
                    s->bytestream = original_bytestream;
                    s->bytestream_end = original_bytestream_end;
                }
            }
        }
    }

    /* Commit the winning candidate into the caller's buffer. */
    s->sequence_number = best_sequence_number;
    s->bytestream = original_bytestream + best_bytestream_size;
    s->bytestream_end = original_bytestream_end;
    if (best_bytestream != original_bytestream)
        memcpy(original_bytestream, best_bytestream, best_bytestream_size);

    ret = 0;

fail:
    av_freep(&temp_bytestream);
    av_frame_free(&diffFrame);
    return ret;
}
840 
842  const AVFrame *pict, int *got_packet)
843 {
844  PNGEncContext *s = avctx->priv_data;
845  int ret;
846  int enc_row_size;
847  size_t max_packet_size;
848  APNGFctlChunk fctl_chunk = {0};
849 
850  if (pict && avctx->codec_id == AV_CODEC_ID_APNG && s->color_type == PNG_COLOR_TYPE_PALETTE) {
851  uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));
852 
853  if (avctx->frame_number == 0) {
854  s->palette_checksum = checksum;
855  } else if (checksum != s->palette_checksum) {
856  av_log(avctx, AV_LOG_ERROR,
857  "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
858  return -1;
859  }
860  }
861 
862  enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
863  max_packet_size =
864  AV_INPUT_BUFFER_MIN_SIZE + // headers
865  avctx->height * (
866  enc_row_size +
867  (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
868  );
869  if (max_packet_size > INT_MAX)
870  return AVERROR(ENOMEM);
871 
872  if (avctx->frame_number == 0) {
873  if (!pict)
874  return AVERROR(EINVAL);
875 
876  s->bytestream = s->extra_data = av_malloc(AV_INPUT_BUFFER_MIN_SIZE);
877  if (!s->extra_data)
878  return AVERROR(ENOMEM);
879 
880  ret = encode_headers(avctx, pict);
881  if (ret < 0)
882  return ret;
883 
884  s->extra_data_size = s->bytestream - s->extra_data;
885 
886  s->last_frame_packet = av_malloc(max_packet_size);
887  if (!s->last_frame_packet)
888  return AVERROR(ENOMEM);
889  } else if (s->last_frame) {
890  ret = ff_alloc_packet2(avctx, pkt, max_packet_size, 0);
891  if (ret < 0)
892  return ret;
893 
894  memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
895  pkt->size = s->last_frame_packet_size;
896  pkt->pts = pkt->dts = s->last_frame->pts;
897  }
898 
899  if (pict) {
900  s->bytestream_start =
901  s->bytestream = s->last_frame_packet;
902  s->bytestream_end = s->bytestream + max_packet_size;
903 
904  // We're encoding the frame first, so we have to do a bit of shuffling around
905  // to have the image data write to the correct place in the buffer
906  fctl_chunk.sequence_number = s->sequence_number;
907  ++s->sequence_number;
908  s->bytestream += 26 + 12;
909 
910  ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
911  if (ret < 0)
912  return ret;
913 
914  fctl_chunk.delay_num = 0; // delay filled in during muxing
915  fctl_chunk.delay_den = 0;
916  } else {
917  s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
918  }
919 
920  if (s->last_frame) {
921  uint8_t* last_fctl_chunk_start = pkt->data;
922  uint8_t buf[26];
923  if (!s->extra_data_updated) {
924  uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
925  if (!side_data)
926  return AVERROR(ENOMEM);
927  memcpy(side_data, s->extra_data, s->extra_data_size);
928  s->extra_data_updated = 1;
929  }
930 
931  AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
932  AV_WB32(buf + 4, s->last_frame_fctl.width);
933  AV_WB32(buf + 8, s->last_frame_fctl.height);
934  AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
935  AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
936  AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
937  AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
938  buf[24] = s->last_frame_fctl.dispose_op;
939  buf[25] = s->last_frame_fctl.blend_op;
940  png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, 26);
941 
942  *got_packet = 1;
943  }
944 
945  if (pict) {
946  if (!s->last_frame) {
947  s->last_frame = av_frame_alloc();
948  if (!s->last_frame)
949  return AVERROR(ENOMEM);
950  } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
951  if (!s->prev_frame) {
952  s->prev_frame = av_frame_alloc();
953  if (!s->prev_frame)
954  return AVERROR(ENOMEM);
955 
956  s->prev_frame->format = pict->format;
957  s->prev_frame->width = pict->width;
958  s->prev_frame->height = pict->height;
959  if ((ret = av_frame_get_buffer(s->prev_frame, 0)) < 0)
960  return ret;
961  }
962 
963  // Do disposal, but not blending
964  av_frame_copy(s->prev_frame, s->last_frame);
965  if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
966  uint32_t y;
967  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
968  for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
969  size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
970  memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
971  }
972  }
973  }
974 
975  av_frame_unref(s->last_frame);
976  ret = av_frame_ref(s->last_frame, (AVFrame*)pict);
977  if (ret < 0)
978  return ret;
979 
980  s->last_frame_fctl = fctl_chunk;
981  s->last_frame_packet_size = s->bytestream - s->bytestream_start;
982  } else {
983  av_frame_free(&s->last_frame);
984  }
985 
986  return 0;
987 }
988 
990 {
991  PNGEncContext *s = avctx->priv_data;
992  int compression_level;
993 
994  switch (avctx->pix_fmt) {
995  case AV_PIX_FMT_RGBA:
996  avctx->bits_per_coded_sample = 32;
997  break;
998  case AV_PIX_FMT_RGB24:
999  avctx->bits_per_coded_sample = 24;
1000  break;
1001  case AV_PIX_FMT_GRAY8:
1002  avctx->bits_per_coded_sample = 0x28;
1003  break;
1004  case AV_PIX_FMT_MONOBLACK:
1005  avctx->bits_per_coded_sample = 1;
1006  break;
1007  case AV_PIX_FMT_PAL8:
1008  avctx->bits_per_coded_sample = 8;
1009  }
1010 
1011  ff_llvidencdsp_init(&s->llvidencdsp);
1012 
1013  if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
1014  s->filter_type = PNG_FILTER_VALUE_NONE;
1015 
1016  if (s->dpi && s->dpm) {
1017  av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
1018  return AVERROR(EINVAL);
1019  } else if (s->dpi) {
1020  s->dpm = s->dpi * 10000 / 254;
1021  }
1022 
1023  s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
1024  switch (avctx->pix_fmt) {
1025  case AV_PIX_FMT_RGBA64BE:
1026  s->bit_depth = 16;
1027  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1028  break;
1029  case AV_PIX_FMT_RGB48BE:
1030  s->bit_depth = 16;
1031  s->color_type = PNG_COLOR_TYPE_RGB;
1032  break;
1033  case AV_PIX_FMT_RGBA:
1034  s->bit_depth = 8;
1035  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1036  break;
1037  case AV_PIX_FMT_RGB24:
1038  s->bit_depth = 8;
1039  s->color_type = PNG_COLOR_TYPE_RGB;
1040  break;
1041  case AV_PIX_FMT_GRAY16BE:
1042  s->bit_depth = 16;
1043  s->color_type = PNG_COLOR_TYPE_GRAY;
1044  break;
1045  case AV_PIX_FMT_GRAY8:
1046  s->bit_depth = 8;
1047  s->color_type = PNG_COLOR_TYPE_GRAY;
1048  break;
1049  case AV_PIX_FMT_GRAY8A:
1050  s->bit_depth = 8;
1051  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1052  break;
1053  case AV_PIX_FMT_YA16BE:
1054  s->bit_depth = 16;
1055  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1056  break;
1057  case AV_PIX_FMT_MONOBLACK:
1058  s->bit_depth = 1;
1059  s->color_type = PNG_COLOR_TYPE_GRAY;
1060  break;
1061  case AV_PIX_FMT_PAL8:
1062  s->bit_depth = 8;
1063  s->color_type = PNG_COLOR_TYPE_PALETTE;
1064  break;
1065  default:
1066  return -1;
1067  }
1068  s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;
1069 
1070  s->zstream.zalloc = ff_png_zalloc;
1071  s->zstream.zfree = ff_png_zfree;
1072  s->zstream.opaque = NULL;
1073  compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
1074  ? Z_DEFAULT_COMPRESSION
1075  : av_clip(avctx->compression_level, 0, 9);
1076  if (deflateInit2(&s->zstream, compression_level, Z_DEFLATED, 15, 8, Z_DEFAULT_STRATEGY) != Z_OK)
1077  return -1;
1078 
1079  return 0;
1080 }
1081 
1083 {
1084  PNGEncContext *s = avctx->priv_data;
1085 
1086  deflateEnd(&s->zstream);
1087  av_frame_free(&s->last_frame);
1088  av_frame_free(&s->prev_frame);
1089  av_freep(&s->last_frame_packet);
1090  av_freep(&s->extra_data);
1091  s->extra_data_size = 0;
1092  return 0;
1093 }
1094 
#define OFFSET(x) offsetof(PNGEncContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
/* Encoder options shared by the PNG and APNG encoders: physical density
 * (dpi/dpm are mutually exclusive, see png_enc_init) and the scanline
 * prediction filter ("mixed" tries all five per row). */
static const AVOption options[] = {
    {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, "pred" },
    { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE }, INT_MIN, INT_MAX, VE, "pred" },
    { "sub", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB }, INT_MIN, INT_MAX, VE, "pred" },
    { "up", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP }, INT_MIN, INT_MAX, VE, "pred" },
    { "avg", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG }, INT_MIN, INT_MAX, VE, "pred" },
    { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, "pred" },
    { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, "pred" },
    { NULL},
};

/* AVClass for the PNG encoder (exposes the option table above). */
static const AVClass pngenc_class = {
    .class_name = "PNG encoder",
    .item_name = av_default_item_name,
    .option = options,
    .version = LIBAVUTIL_VERSION_INT,
};

/* AVClass for the APNG encoder (same options as PNG). */
static const AVClass apngenc_class = {
    .class_name = "APNG encoder",
    .item_name = av_default_item_name,
    .option = options,
    .version = LIBAVUTIL_VERSION_INT,
};
1123 
1125  .name = "png",
1126  .long_name = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
1127  .type = AVMEDIA_TYPE_VIDEO,
1128  .id = AV_CODEC_ID_PNG,
1129  .priv_data_size = sizeof(PNGEncContext),
1130  .init = png_enc_init,
1131  .close = png_enc_close,
1132  .encode2 = encode_png,
1133  .capabilities = AV_CODEC_CAP_FRAME_THREADS,
1134  .pix_fmts = (const enum AVPixelFormat[]) {
1141  },
1142  .priv_class = &pngenc_class,
1143  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1144 };
1145 
1147  .name = "apng",
1148  .long_name = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
1149  .type = AVMEDIA_TYPE_VIDEO,
1150  .id = AV_CODEC_ID_APNG,
1151  .priv_data_size = sizeof(PNGEncContext),
1152  .init = png_enc_init,
1153  .close = png_enc_close,
1154  .encode2 = encode_apng,
1155  .capabilities = AV_CODEC_CAP_DELAY,
1156  .pix_fmts = (const enum AVPixelFormat[]) {
1163  },
1164  .priv_class = &apngenc_class,
1165  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1166 };
AVFrame::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:534
AVCodec
AVCodec.
Definition: codec.h:197
encode_frame
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:431
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:187
FF_CODEC_CAP_INIT_THREADSAFE
#define FF_CODEC_CAP_INIT_THREADSAFE
The codec does not modify any global variables in the init function, allowing to call the init functi...
Definition: internal.h:41
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
av_clip
#define av_clip
Definition: common.h:122
init
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:31
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
PNGEncContext::buf
uint8_t buf[IOBUF_SIZE]
Definition: pngenc.c:59
AV_WL32
#define AV_WL32(p, v)
Definition: intreadwrite.h:426
AVColorTransferCharacteristic
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:473
libm.h
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:245
FFSWAP
#define FFSWAP(type, a, b)
Definition: common.h:108
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:616
AV_WB32_PNG
#define AV_WB32_PNG(buf, n)
Definition: pngenc.c:292
AVCRC
uint32_t AVCRC
Definition: crc.h:47
png_get_chrm
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
Definition: pngenc.c:293
MKTAG
#define MKTAG(a, b, c, d)
Definition: common.h:478
ff_png_get_nb_channels
int ff_png_get_nb_channels(int color_type)
Definition: png.c:49
APNG_DISPOSE_OP_BACKGROUND
@ APNG_DISPOSE_OP_BACKGROUND
Definition: apng.h:32
PNGEncContext::bits_per_pixel
int bits_per_pixel
Definition: pngenc.c:66
PNGEncContext::last_frame
AVFrame * last_frame
Definition: pngenc.c:76
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:225
ff_apng_encoder
const AVCodec ff_apng_encoder
Definition: pngenc.c:1146
AVFrame::color_primaries
enum AVColorPrimaries color_primaries
Definition: frame.h:532
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:111
apng_encode_frame
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict, APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
Definition: pngenc.c:712
APNGFctlChunk::delay_num
uint16_t delay_num
Definition: pngenc.c:44
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:303
AV_PIX_FMT_RGBA64BE
@ AV_PIX_FMT_RGBA64BE
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
Definition: pixfmt.h:195
AVFrame::width
int width
Definition: frame.h:361
PNG_FILTER_VALUE_MIXED
#define PNG_FILTER_VALUE_MIXED
Definition: png.h:45
w
uint8_t w
Definition: llviddspenc.c:39
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:365
AVOption
AVOption.
Definition: opt.h:248
b
#define b
Definition: input.c:41
data
const char data[16]
Definition: mxf.c:142
png_write_row
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
Definition: pngenc.c:271
output_data
static int output_data(MLPDecodeContext *m, unsigned int substr, AVFrame *frame, int *got_frame_ptr)
Write the audio data into the output buffer.
Definition: mlpdec.c:1063
PNGEncContext::dpm
int dpm
Physical pixel density, in dots per meter, if set.
Definition: pngenc.c:61
png_get_gama
static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
Definition: pngenc.c:335
PNGEncContext::last_frame_packet
uint8_t * last_frame_packet
Definition: pngenc.c:78
AVColorPrimaries
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:448
AV_CODEC_ID_APNG
@ AV_CODEC_ID_APNG
Definition: codec_id.h:263
FF_COMPRESSION_DEFAULT
#define FF_COMPRESSION_DEFAULT
Definition: avcodec.h:574
ff_png_zfree
void ff_png_zfree(void *opaque, void *ptr)
Definition: png.c:44
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:396
ff_png_encoder
const AVCodec ff_png_encoder
Definition: pngenc.c:1124
AV_WB64
#define AV_WB64(p, v)
Definition: intreadwrite.h:433
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:317
av_malloc
#define av_malloc(s)
Definition: tableprint_vlc.h:31
NB_PASSES
#define NB_PASSES
Definition: png.h:47
crc.h
AV_PIX_FMT_GRAY16BE
@ AV_PIX_FMT_GRAY16BE
Y , 16bpp, big-endian.
Definition: pixfmt.h:97
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:67
PNGEncContext::prev_frame
AVFrame * prev_frame
Definition: pngenc.c:75
AVCOL_TRC_IEC61966_2_1
@ AVCOL_TRC_IEC61966_2_1
IEC 61966-2-1 (sRGB or sYCC)
Definition: pixfmt.h:487
U
#define U(x)
Definition: vp56_arith.h:37
ff_png_pass_row_size
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
Definition: png.c:62
fail
#define fail()
Definition: checkasm.h:134
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:55
avpriv_get_gamma_from_trc
double avpriv_get_gamma_from_trc(enum AVColorTransferCharacteristic trc)
Determine a suitable 'gamma' value to match the supplied AVColorTransferCharacteristic.
Definition: color_utils.c:28
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:581
APNGFctlChunk::blend_op
uint8_t blend_op
Definition: pngenc.c:45
AVRational::num
int num
Numerator.
Definition: rational.h:59
encode_png
static int encode_png(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:524
PNG_COLOR_TYPE_RGB_ALPHA
#define PNG_COLOR_TYPE_RGB_ALPHA
Definition: png.h:36
AV_CODEC_FLAG_INTERLACED_DCT
#define AV_CODEC_FLAG_INTERLACED_DCT
Use interlaced DCT.
Definition: avcodec.h:298
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:98
av_bswap32
#define av_bswap32
Definition: bswap.h:33
color_utils.h
avassert.h
pkt
AVPacket * pkt
Definition: movenc.c:59
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:181
av_cold
#define av_cold
Definition: attributes.h:90
encode_apng
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
Definition: pngenc.c:841
mask
static const uint16_t mask[17]
Definition: lzw.c:38
PNGEncContext::bytestream_end
uint8_t * bytestream_end
Definition: pngenc.c:54
width
#define width
stereo3d.h
s
#define s(width, name)
Definition: cbs_vp9.c:257
png_filter_row
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type, uint8_t *src, uint8_t *top, int size, int bpp)
Definition: pngenc.c:165
png_write_chunk
static void png_write_chunk(uint8_t **f, uint32_t tag, const uint8_t *buf, int length)
Definition: pngenc.c:224
PNG_COLOR_TYPE_RGB
#define PNG_COLOR_TYPE_RGB
Definition: png.h:35
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:289
AV_INPUT_BUFFER_MIN_SIZE
#define AV_INPUT_BUFFER_MIN_SIZE
Definition: avcodec.h:199
png_write_image_data
static void png_write_image_data(AVCodecContext *avctx, const uint8_t *buf, int length)
Definition: pngenc.c:243
APNG_DISPOSE_OP_PREVIOUS
@ APNG_DISPOSE_OP_PREVIOUS
Definition: apng.h:33
AVCOL_PRI_SMPTE240M
@ AVCOL_PRI_SMPTE240M
functionally identical to above
Definition: pixfmt.h:457
APNG_DISPOSE_OP_NONE
@ APNG_DISPOSE_OP_NONE
Definition: apng.h:31
f
#define f(width, name)
Definition: cbs_vp9.c:255
pass
#define pass
Definition: fft_template.c:603
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
AVCOL_PRI_BT470BG
@ AVCOL_PRI_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:455
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:511
AVStereo3D::flags
int flags
Additional information about the frame packing.
Definition: stereo3d.h:185
AVCOL_PRI_SMPTE170M
@ AVCOL_PRI_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:456
AV_CODEC_ID_PNG
@ AV_CODEC_ID_PNG
Definition: codec_id.h:110
PNGEncContext
Definition: pngenc.c:48
APNGFctlChunk::y_offset
uint32_t y_offset
Definition: pngenc.c:43
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:108
AV_PIX_FMT_GRAY8A
@ AV_PIX_FMT_GRAY8A
alias for AV_PIX_FMT_YA8
Definition: pixfmt.h:136
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
APNGFctlChunk::delay_den
uint16_t delay_den
Definition: pngenc.c:44
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
NULL
#define NULL
Definition: coverity.c:32
apng.h
AV_WB16
#define AV_WB16(p, v)
Definition: intreadwrite.h:405
IOBUF_SIZE
#define IOBUF_SIZE
Definition: pngenc.c:38
AV_PIX_FMT_MONOBLACK
@ AV_PIX_FMT_MONOBLACK
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb.
Definition: pixfmt.h:76
AVCOL_PRI_BT709
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B
Definition: pixfmt.h:450
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:235
apng_do_inverse_blend
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input, APNGFctlChunk *fctl_chunk, uint8_t bpp)
Definition: pngenc.c:569
APNGFctlChunk::width
uint32_t width
Definition: pngenc.c:42
src
#define src
Definition: vp8dsp.c:255
png_enc_close
static av_cold int png_enc_close(AVCodecContext *avctx)
Definition: pngenc.c:1082
PNG_COLOR_TYPE_GRAY
#define PNG_COLOR_TYPE_GRAY
Definition: png.h:33
deflate
static void deflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
Definition: vf_neighbor.c:165
PNGEncContext::filter_type
int filter_type
Definition: pngenc.c:56
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:74
PNGEncContext::extra_data_updated
int extra_data_updated
Definition: pngenc.c:71
APNGFctlChunk
Definition: pngenc.c:40
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
ff_png_pass_ymask
const uint8_t ff_png_pass_ymask[NB_PASSES]
Definition: png.c:25
ff_llvidencdsp_init
av_cold void ff_llvidencdsp_init(LLVidEncDSPContext *c)
Definition: lossless_videoencdsp.c:91
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
AVCOL_PRI_BT2020
@ AVCOL_PRI_BT2020
ITU-R BT2020.
Definition: pixfmt.h:459
APNGFctlChunk::sequence_number
uint32_t sequence_number
Definition: pngenc.c:41
AV_WB32
#define AV_WB32(p, v)
Definition: intreadwrite.h:419
PNG_FILTER_VALUE_NONE
#define PNG_FILTER_VALUE_NONE
Definition: png.h:40
AV_PIX_FMT_RGB24
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:68
AVPacket::size
int size
Definition: packet.h:366
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:326
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:677
AV_PIX_FMT_YA16BE
@ AV_PIX_FMT_YA16BE
16 bits gray, 16 bits alpha (big-endian)
Definition: pixfmt.h:202
PNGEncContext::last_frame_packet_size
size_t last_frame_packet_size
Definition: pngenc.c:79
PNG_FILTER_VALUE_AVG
#define PNG_FILTER_VALUE_AVG
Definition: png.h:43
size
int size
Definition: twinvq_data.h:10344
PNGEncContext::llvidencdsp
LLVidEncDSPContext llvidencdsp
Definition: pngenc.c:50
AVFrameSideData::data
uint8_t * data
Definition: frame.h:211
MKBETAG
#define MKBETAG(a, b, c, d)
Definition: common.h:479
PNG_FILTER_VALUE_PAETH
#define PNG_FILTER_VALUE_PAETH
Definition: png.h:44
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:376
PNGEncContext::extra_data
uint8_t * extra_data
Definition: pngenc.c:72
PNG_FILTER_VALUE_UP
#define PNG_FILTER_VALUE_UP
Definition: png.h:42
png_choose_filter
static uint8_t * png_choose_filter(PNGEncContext *s, uint8_t *dst, uint8_t *src, uint8_t *top, int size, int bpp)
Definition: pngenc.c:194
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:364
FFMIN
#define FFMIN(a, b)
Definition: common.h:105
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
av_crc_get_table
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
Definition: crc.c:374
OFFSET
#define OFFSET(x)
Definition: pngenc.c:1095
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:371
AV_STEREO3D_FLAG_INVERT
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
Definition: stereo3d.h:167
input
and forward the test the status of outputs and forward it to the corresponding return FFERROR_NOT_READY If the filters stores internally one or a few frame for some input
Definition: filter_design.txt:172
PNGSIG
#define PNGSIG
Definition: png.h:49
input_data
static void input_data(MLPEncodeContext *ctx, void *samples)
Wrapper function for inputting data in two different bit-depths.
Definition: mlpenc.c:1278
lossless_videoencdsp.h
src1
#define src1
Definition: h264pred.c:140
AVCodecContext::bits_per_coded_sample
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
Definition: avcodec.h:1524
PNG_FILTER_VALUE_SUB
#define PNG_FILTER_VALUE_SUB
Definition: png.h:41
i
int i
Definition: input.c:407
AV_PIX_FMT_RGB48BE
@ AV_PIX_FMT_RGB48BE
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
Definition: pixfmt.h:102
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:358
options
static const AVOption options[]
Definition: pngenc.c:1097
AV_FRAME_DATA_STEREO3D
@ AV_FRAME_DATA_STEREO3D
Stereoscopic 3d metadata.
Definition: frame.h:63
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:436
ff_png_zalloc
void * ff_png_zalloc(void *opaque, unsigned int items, unsigned int size)
Definition: png.c:39
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:204
apngenc_class
static const AVClass apngenc_class
Definition: pngenc.c:1117
len
int len
Definition: vorbis_enc_data.h:426
AVCodecContext::height
int height
Definition: avcodec.h:674
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:711
LLVidEncDSPContext
Definition: lossless_videoencdsp.h:26
sub_left_prediction
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
Definition: pngenc.c:149
PNGEncContext::color_type
int color_type
Definition: pngenc.c:65
avcodec.h
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
AVCOL_PRI_BT470M
@ AVCOL_PRI_BT470M
also FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:453
tag
uint32_t tag
Definition: movenc.c:1594
ret
ret
Definition: filter_design.txt:187
pred
static const float pred[4]
Definition: siprdata.h:259
PNGEncContext::extra_data_size
int extra_data_size
Definition: pngenc.c:73
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:72
AVStereo3D::type
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:180
PNGEncContext::bit_depth
int bit_depth
Definition: pngenc.c:64
checksum
static volatile int checksum
Definition: adler32.c:30
PNGEncContext::bytestream_start
uint8_t * bytestream_start
Definition: pngenc.c:53
AVCodecContext
main external API structure.
Definition: avcodec.h:501
AVFrame::height
int height
Definition: frame.h:361
av_packet_new_side_data
uint8_t * av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, size_t size)
Allocate new information of a packet.
Definition: avpacket.c:220
av_crc
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
Definition: crc.c:392
AV_PKT_DATA_NEW_EXTRADATA
@ AV_PKT_DATA_NEW_EXTRADATA
The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was...
Definition: packet.h:55
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:225
png_get_interlaced_row
static void png_get_interlaced_row(uint8_t *dst, int row_size, int bits_per_pixel, int pass, const uint8_t *src, int width)
Definition: pngenc.c:82
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:77
APNG_BLEND_OP_SOURCE
@ APNG_BLEND_OP_SOURCE
Definition: apng.h:37
AV_CRC_32_IEEE_LE
@ AV_CRC_32_IEEE_LE
Definition: crc.h:54
PNGEncContext::last_frame_fctl
APNGFctlChunk last_frame_fctl
Definition: pngenc.c:77
PNGEncContext::dpi
int dpi
Physical pixel density, in dots per inch, if set.
Definition: pngenc.c:60
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AVCodecContext::frame_number
int frame_number
Frame counter, set by libavcodec.
Definition: avcodec.h:1129
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:209
png_enc_init
static av_cold int png_enc_init(AVCodecContext *avctx)
Definition: pngenc.c:989
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
AVPacket
This structure stores compressed data.
Definition: packet.h:342
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:528
png.h
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
d
d
Definition: ffmpeg_filter.c:156
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:674
bytestream.h
sub_png_paeth_prediction
static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top, int w, int bpp)
Definition: pngenc.c:121
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:334
PNG_COLOR_TYPE_GRAY_ALPHA
#define PNG_COLOR_TYPE_GRAY_ALPHA
Definition: png.h:37
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
APNGFctlChunk::height
uint32_t height
Definition: pngenc.c:42
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:176
PNGEncContext::bytestream
uint8_t * bytestream
Definition: pngenc.c:52
PNGEncContext::is_progressive
int is_progressive
Definition: pngenc.c:63
ff_alloc_packet2
int ff_alloc_packet2(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int64_t min_size)
Check AVPacket size and/or allocate data.
Definition: encode.c:33
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:234
VE
#define VE
Definition: pngenc.c:1096
encode_headers
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
Definition: pngenc.c:345
APNGFctlChunk::dispose_op
uint8_t dispose_op
Definition: pngenc.c:45
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:859
PNGEncContext::palette_checksum
uint32_t palette_checksum
Definition: pngenc.c:69
PNG_COLOR_TYPE_PALETTE
#define PNG_COLOR_TYPE_PALETTE
Definition: png.h:34
APNGFctlChunk::x_offset
uint32_t x_offset
Definition: pngenc.c:43
PNGEncContext::sequence_number
uint32_t sequence_number
Definition: pngenc.c:70
AVCodecContext::compression_level
int compression_level
Definition: avcodec.h:573
pngenc_class
static const AVClass pngenc_class
Definition: pngenc.c:1110
PNGEncContext::zstream
z_stream zstream
Definition: pngenc.c:58