FFmpeg
pngenc.c
1 /*
2  * PNG image format
3  * Copyright (c) 2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include "avcodec.h"
23 #include "codec_internal.h"
24 #include "encode.h"
25 #include "bytestream.h"
26 #include "lossless_videoencdsp.h"
27 #include "png.h"
28 #include "apng.h"
29 #include "zlib_wrapper.h"
30 
31 #include "libavutil/avassert.h"
32 #include "libavutil/crc.h"
33 #include "libavutil/libm.h"
34 #include "libavutil/opt.h"
35 #include "libavutil/color_utils.h"
36 #include "libavutil/stereo3d.h"
37 
38 #include <zlib.h>
39 
40 #define IOBUF_SIZE 4096
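/* Size of the intermediate deflate output buffer: compressed data is flushed
 * into IDAT/fdAT chunks in blocks of at most IOBUF_SIZE bytes. */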
41 
42 typedef struct APNGFctlChunk {
43  uint32_t sequence_number;
44  uint32_t width, height;
45  uint32_t x_offset, y_offset;
46  uint16_t delay_num, delay_den;
47  uint8_t dispose_op, blend_op;
48 } APNGFctlChunk;
49 
50 typedef struct PNGEncContext {
51  AVClass *class;
52  LLVidEncDSPContext llvidencdsp;
53 
54  uint8_t *bytestream;
55  uint8_t *bytestream_start;
56  uint8_t *bytestream_end;
57 
58  int filter_type;
59 
60  FFZStream zstream;
61  uint8_t buf[IOBUF_SIZE];
62  int dpi; ///< Physical pixel density, in dots per inch, if set
63  int dpm; ///< Physical pixel density, in dots per meter, if set
64 
65  int is_progressive;
66  int bit_depth;
67  int color_type;
68  int bits_per_pixel;
69 
70  // APNG
71  uint32_t palette_checksum; // Used to ensure a single unique palette
72  uint32_t sequence_number;
73  int extra_data_updated;
74  uint8_t *extra_data;
75  int extra_data_size;
76 
77  AVFrame *prev_frame;
78  AVFrame *last_frame;
79  APNGFctlChunk last_frame_fctl;
80  uint8_t *last_frame_packet;
81  size_t last_frame_packet_size;
82 } PNGEncContext;
83 
84 static void png_get_interlaced_row(uint8_t *dst, int row_size,
85  int bits_per_pixel, int pass,
86  const uint8_t *src, int width)
87 {
88  int x, mask, dst_x, j, b, bpp;
89  uint8_t *d;
90  const uint8_t *s;
91  static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};
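/* Each entry selects, within an 8-pixel block, which columns belong to the
 * corresponding Adam7 interlacing pass (bit 7 = leftmost pixel of the block). */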
92 
93  mask = masks[pass];
94  switch (bits_per_pixel) {
95  case 1:
96  memset(dst, 0, row_size);
97  dst_x = 0;
98  for (x = 0; x < width; x++) {
99  j = (x & 7);
100  if ((mask << j) & 0x80) {
101  b = (src[x >> 3] >> (7 - j)) & 1;
102  dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
103  dst_x++;
104  }
105  }
106  break;
107  default:
108  bpp = bits_per_pixel >> 3;
109  d = dst;
110  s = src;
111  for (x = 0; x < width; x++) {
112  j = x & 7;
113  if ((mask << j) & 0x80) {
114  memcpy(d, s, bpp);
115  d += bpp;
116  }
117  s += bpp;
118  }
119  break;
120  }
121 }
122 
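/* PNG Paeth filter: each output byte is the source byte minus whichever of
 * left (a), above (b) or upper-left (c) is closest to the estimate
 * p = a + b - c, preferring a, then b, then c on ties. */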
123 static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
124  int w, int bpp)
125 {
126  int i;
127  for (i = 0; i < w; i++) {
128  int a, b, c, p, pa, pb, pc;
129 
130  a = src[i - bpp];
131  b = top[i];
132  c = top[i - bpp];
133 
134  p = b - c;
135  pc = a - c;
136 
137  pa = abs(p);
138  pb = abs(pc);
139  pc = abs(p + pc);
140 
141  if (pa <= pb && pa <= pc)
142  p = a;
143  else if (pb <= pc)
144  p = b;
145  else
146  p = c;
147  dst[i] = src[i] - p;
148  }
149 }
150 
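/* "Sub" filter: the first bpp bytes have no left neighbour and are stored
 * verbatim; every following byte is predicted from the byte bpp positions to
 * its left.  A short scalar loop handles the unaligned head before the
 * DSP diff_bytes routine processes the rest. */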
151 static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
152 {
153  const uint8_t *src1 = src + bpp;
154  const uint8_t *src2 = src;
155  int x, unaligned_w;
156 
157  memcpy(dst, src, bpp);
158  dst += bpp;
159  size -= bpp;
160  unaligned_w = FFMIN(32 - bpp, size);
161  for (x = 0; x < unaligned_w; x++)
162  *dst++ = *src1++ - *src2++;
163  size -= unaligned_w;
164  c->llvidencdsp.diff_bytes(dst, src1, src2, size);
165 }
166 
167 static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
168  uint8_t *src, uint8_t *top, int size, int bpp)
169 {
170  int i;
171 
172  switch (filter_type) {
173  case PNG_FILTER_VALUE_NONE:
174  memcpy(dst, src, size);
175  break;
176  case PNG_FILTER_VALUE_SUB:
177  sub_left_prediction(c, dst, src, bpp, size);
178  break;
179  case PNG_FILTER_VALUE_UP:
180  c->llvidencdsp.diff_bytes(dst, src, top, size);
181  break;
182  case PNG_FILTER_VALUE_AVG:
183  for (i = 0; i < bpp; i++)
184  dst[i] = src[i] - (top[i] >> 1);
185  for (; i < size; i++)
186  dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
187  break;
188  case PNG_FILTER_VALUE_PAETH:
189  for (i = 0; i < bpp; i++)
190  dst[i] = src[i] - top[i];
191  sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
192  break;
193  }
194 }
195 
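/* With the "mixed" prediction setting, each row is filtered with all five PNG
 * filter types and the variant with the smallest sum of absolute values is
 * kept (the usual heuristic for best compressibility); otherwise the
 * configured filter is applied directly.  The returned row begins with its
 * filter-type byte. */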
196 static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
197  uint8_t *src, uint8_t *top, int size, int bpp)
198 {
199  int pred = s->filter_type;
200  av_assert0(bpp || !pred);
201  if (!top && pred)
202  pred = PNG_FILTER_VALUE_SUB;
203  if (pred == PNG_FILTER_VALUE_MIXED) {
204  int i;
205  int cost, bcost = INT_MAX;
206  uint8_t *buf1 = dst, *buf2 = dst + size + 16;
207  for (pred = 0; pred < 5; pred++) {
208  png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
209  buf1[0] = pred;
210  cost = 0;
211  for (i = 0; i <= size; i++)
212  cost += abs((int8_t) buf1[i]);
213  if (cost < bcost) {
214  bcost = cost;
215  FFSWAP(uint8_t *, buf1, buf2);
216  }
217  }
218  return buf2;
219  } else {
220  png_filter_row(s, dst + 1, pred, src, top, size, bpp);
221  dst[0] = pred;
222  return dst;
223  }
224 }
225 
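/* Write one PNG chunk: 4-byte big-endian length, 4-byte type, payload, and a
 * CRC-32 computed over the type and payload (the length is not covered). */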
226 static void png_write_chunk(uint8_t **f, uint32_t tag,
227  const uint8_t *buf, int length)
228 {
229  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
230  uint32_t crc = ~0U;
231  uint8_t tagbuf[4];
232 
233  bytestream_put_be32(f, length);
234  AV_WL32(tagbuf, tag);
235  crc = av_crc(crc_table, crc, tagbuf, 4);
236  bytestream_put_be32(f, av_bswap32(tag));
237  if (length > 0) {
238  crc = av_crc(crc_table, crc, buf, length);
239  if (*f != buf)
240  memcpy(*f, buf, length);
241  *f += length;
242  }
243  bytestream_put_be32(f, ~crc);
244 }
245 
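/* Emit compressed image data: plain IDAT chunks for PNG and for the first APNG
 * frame, fdAT chunks (which carry an extra 4-byte sequence number) afterwards. */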
246 static void png_write_image_data(AVCodecContext *avctx,
247  const uint8_t *buf, int length)
248 {
249  PNGEncContext *s = avctx->priv_data;
250  const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
251  uint32_t crc = ~0U;
252 
253  if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_number == 0) {
254  png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
255  return;
256  }
257 
258  bytestream_put_be32(&s->bytestream, length + 4);
259 
260  bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
261  bytestream_put_be32(&s->bytestream, s->sequence_number);
262  crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
263 
264  crc = av_crc(crc_table, crc, buf, length);
265  memcpy(s->bytestream, buf, length);
266  s->bytestream += length;
267 
268  bytestream_put_be32(&s->bytestream, ~crc);
269 
270  ++s->sequence_number;
271 }
272 
273 /* XXX: do filtering */
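/* Feed one filtered row (filter-type byte followed by the row data) to zlib,
 * flushing full IOBUF_SIZE blocks of compressed output into the packet as
 * they fill up. */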
274 static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
275 {
276  PNGEncContext *s = avctx->priv_data;
277  z_stream *const zstream = &s->zstream.zstream;
278  int ret;
279 
280  zstream->avail_in = size;
281  zstream->next_in = data;
282  while (zstream->avail_in > 0) {
283  ret = deflate(zstream, Z_NO_FLUSH);
284  if (ret != Z_OK)
285  return -1;
286  if (zstream->avail_out == 0) {
287  if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
288  png_write_image_data(avctx, s->buf, IOBUF_SIZE);
289  zstream->avail_out = IOBUF_SIZE;
290  zstream->next_out = s->buf;
291  }
292  }
293  return 0;
294 }
295 
296 #define AV_WB32_PNG(buf, n) AV_WB32(buf, lrint((n) * 100000))
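/* cHRM and gAMA store their values as 32-bit integers scaled by 100000, as
 * required by the PNG specification. */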
297 static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
298 {
299  double rx, ry, gx, gy, bx, by, wx = 0.3127, wy = 0.3290;
300  switch (prim) {
301  case AVCOL_PRI_BT709:
302  rx = 0.640; ry = 0.330;
303  gx = 0.300; gy = 0.600;
304  bx = 0.150; by = 0.060;
305  break;
306  case AVCOL_PRI_BT470M:
307  rx = 0.670; ry = 0.330;
308  gx = 0.210; gy = 0.710;
309  bx = 0.140; by = 0.080;
310  wx = 0.310; wy = 0.316;
311  break;
312  case AVCOL_PRI_BT470BG:
313  rx = 0.640; ry = 0.330;
314  gx = 0.290; gy = 0.600;
315  bx = 0.150; by = 0.060;
316  break;
317  case AVCOL_PRI_SMPTE170M:
318  case AVCOL_PRI_SMPTE240M:
319  rx = 0.630; ry = 0.340;
320  gx = 0.310; gy = 0.595;
321  bx = 0.155; by = 0.070;
322  break;
323  case AVCOL_PRI_BT2020:
324  rx = 0.708; ry = 0.292;
325  gx = 0.170; gy = 0.797;
326  bx = 0.131; by = 0.046;
327  break;
328  default:
329  return 0;
330  }
331 
332  AV_WB32_PNG(buf , wx); AV_WB32_PNG(buf + 4 , wy);
333  AV_WB32_PNG(buf + 8 , rx); AV_WB32_PNG(buf + 12, ry);
334  AV_WB32_PNG(buf + 16, gx); AV_WB32_PNG(buf + 20, gy);
335  AV_WB32_PNG(buf + 24, bx); AV_WB32_PNG(buf + 28, by);
336  return 1;
337 }
338 
339 static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
340 {
341  double gamma = avpriv_get_gamma_from_trc(trc);
342  if (gamma <= 1e-6)
343  return 0;
344 
345  AV_WB32_PNG(buf, 1.0 / gamma);
346  return 1;
347 }
348 
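/* Write an iCCP chunk: a NUL-terminated profile name (at most 79 characters),
 * a compression-method byte of 0, then the zlib-compressed ICC profile. */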
349 static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
350 {
351  z_stream *const zstream = &s->zstream.zstream;
352  const AVDictionaryEntry *entry;
353  const char *name;
354  uint8_t *start, *buf;
355  int ret;
356 
357  if (!sd || !sd->size)
358  return 0;
359  zstream->next_in = sd->data;
360  zstream->avail_in = sd->size;
361 
362  /* write the chunk contents first */
363  start = s->bytestream + 8; /* make room for iCCP tag + length */
364  buf = start;
365 
366  /* profile description */
367  entry = av_dict_get(sd->metadata, "name", NULL, 0);
368  name = (entry && entry->value[0]) ? entry->value : "icc";
369  for (int i = 0;; i++) {
370  char c = (i == 79) ? 0 : name[i];
371  bytestream_put_byte(&buf, c);
372  if (!c)
373  break;
374  }
375 
376  /* compression method and profile data */
377  bytestream_put_byte(&buf, 0);
378  zstream->next_out = buf;
379  zstream->avail_out = s->bytestream_end - buf;
380  ret = deflate(zstream, Z_FINISH);
381  deflateReset(zstream);
382  if (ret != Z_STREAM_END)
383  return AVERROR_EXTERNAL;
384 
385  /* rewind to the start and write the chunk header/crc */
386  png_write_chunk(&s->bytestream, MKTAG('i', 'C', 'C', 'P'), start,
387  zstream->next_out - start);
388  return 0;
389 }
390 
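/* Write all chunks that precede the image data: IHDR, pHYs, optional
 * sTER/sRGB/cHRM/gAMA/iCCP colour metadata, and, for palette formats, PLTE
 * plus tRNS when any palette entry is not fully opaque. */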
391 static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
392 {
393  AVFrameSideData *side_data;
394  PNGEncContext *s = avctx->priv_data;
395  int ret;
396 
397  /* write png header */
398  AV_WB32(s->buf, avctx->width);
399  AV_WB32(s->buf + 4, avctx->height);
400  s->buf[8] = s->bit_depth;
401  s->buf[9] = s->color_type;
402  s->buf[10] = 0; /* compression type */
403  s->buf[11] = 0; /* filter type */
404  s->buf[12] = s->is_progressive; /* interlace type */
405  png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);
406 
407  /* write physical information */
408  if (s->dpm) {
409  AV_WB32(s->buf, s->dpm);
410  AV_WB32(s->buf + 4, s->dpm);
411  s->buf[8] = 1; /* unit specifier is meter */
412  } else {
413  AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
414  AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
415  s->buf[8] = 0; /* unit specifier is unknown */
416  }
417  png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);
418 
419  /* write stereoscopic information */
420  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_STEREO3D);
421  if (side_data) {
422  AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
423  switch (stereo3d->type) {
424  case AV_STEREO3D_SIDEBYSIDE:
425  s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
426  png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
427  break;
428  case AV_STEREO3D_2D:
429  break;
430  default:
431  av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
432  break;
433  }
434  }
435 
436  /* write colorspace information */
437  if (pict->color_primaries == AVCOL_PRI_BT709 &&
438  pict->color_trc == AVCOL_TRC_IEC61966_2_1) {
439  s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
440  png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
441  }
442 
443  if (png_get_chrm(pict->color_primaries, s->buf))
444  png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
445  if (png_get_gama(pict->color_trc, s->buf))
446  png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);
447 
448  side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
449  if ((ret = png_write_iccp(s, side_data)))
450  return ret;
451 
452  /* put the palette if needed, must be after colorspace information */
453  if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
454  int has_alpha, alpha, i;
455  unsigned int v;
456  uint32_t *palette;
457  uint8_t *ptr, *alpha_ptr;
458 
459  palette = (uint32_t *)pict->data[1];
460  ptr = s->buf;
461  alpha_ptr = s->buf + 256 * 3;
462  has_alpha = 0;
463  for (i = 0; i < 256; i++) {
464  v = palette[i];
465  alpha = v >> 24;
466  if (alpha != 0xff)
467  has_alpha = 1;
468  *alpha_ptr++ = alpha;
469  bytestream_put_be24(&ptr, v);
470  }
471  png_write_chunk(&s->bytestream,
472  MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
473  if (has_alpha) {
474  png_write_chunk(&s->bytestream,
475  MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
476  }
477  }
478 
479  return 0;
480 }
481 
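/* Filter and deflate the picture row by row (pass by pass when interlaced)
 * and emit the compressed stream through png_write_image_data(). */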
482 static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
483 {
484  PNGEncContext *s = avctx->priv_data;
485  z_stream *const zstream = &s->zstream.zstream;
486  const AVFrame *const p = pict;
487  int y, len, ret;
488  int row_size, pass_row_size;
489  uint8_t *ptr, *top, *crow_buf, *crow;
490  uint8_t *crow_base = NULL;
491  uint8_t *progressive_buf = NULL;
492  uint8_t *top_buf = NULL;
493 
494  row_size = (pict->width * s->bits_per_pixel + 7) >> 3;
495 
496  crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
497  if (!crow_base) {
498  ret = AVERROR(ENOMEM);
499  goto the_end;
500  }
501  // pixel data should be aligned, but there's a control byte before it
502  crow_buf = crow_base + 15;
503  if (s->is_progressive) {
504  progressive_buf = av_malloc(row_size + 1);
505  top_buf = av_malloc(row_size + 1);
506  if (!progressive_buf || !top_buf) {
507  ret = AVERROR(ENOMEM);
508  goto the_end;
509  }
510  }
511 
512  /* put each row */
513  zstream->avail_out = IOBUF_SIZE;
514  zstream->next_out = s->buf;
515  if (s->is_progressive) {
516  int pass;
517 
518  for (pass = 0; pass < NB_PASSES; pass++) {
519  /* NOTE: a pass is completely omitted if no pixels would be
520  * output */
521  pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
522  if (pass_row_size > 0) {
523  top = NULL;
524  for (y = 0; y < pict->height; y++)
525  if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
526  ptr = p->data[0] + y * p->linesize[0];
527  FFSWAP(uint8_t *, progressive_buf, top_buf);
528  png_get_interlaced_row(progressive_buf, pass_row_size,
529  s->bits_per_pixel, pass,
530  ptr, pict->width);
531  crow = png_choose_filter(s, crow_buf, progressive_buf,
532  top, pass_row_size, s->bits_per_pixel >> 3);
533  png_write_row(avctx, crow, pass_row_size + 1);
534  top = progressive_buf;
535  }
536  }
537  }
538  } else {
539  top = NULL;
540  for (y = 0; y < pict->height; y++) {
541  ptr = p->data[0] + y * p->linesize[0];
542  crow = png_choose_filter(s, crow_buf, ptr, top,
543  row_size, s->bits_per_pixel >> 3);
544  png_write_row(avctx, crow, row_size + 1);
545  top = ptr;
546  }
547  }
548  /* compress last bytes */
549  for (;;) {
550  ret = deflate(zstream, Z_FINISH);
551  if (ret == Z_OK || ret == Z_STREAM_END) {
552  len = IOBUF_SIZE - zstream->avail_out;
553  if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
554  png_write_image_data(avctx, s->buf, len);
555  }
556  zstream->avail_out = IOBUF_SIZE;
557  zstream->next_out = s->buf;
558  if (ret == Z_STREAM_END)
559  break;
560  } else {
561  ret = -1;
562  goto the_end;
563  }
564  }
565 
566  ret = 0;
567 
568 the_end:
569  av_freep(&crow_base);
570  av_freep(&progressive_buf);
571  av_freep(&top_buf);
572  deflateReset(zstream);
573  return ret;
574 }
575 
576 static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict,
577  uint64_t *max_packet_size)
578 {
579  PNGEncContext *s = avctx->priv_data;
580  const AVFrameSideData *sd;
581  const int hdr_size = 128;
582  uint64_t new_pkt_size;
583  uLong bound;
584 
585  if (!pict)
586  return 0;
587  sd = av_frame_get_side_data(pict, AV_FRAME_DATA_ICC_PROFILE);
588  if (!sd || !sd->size)
589  return 0;
590  if (sd->size != (uLong) sd->size)
591  return AVERROR_INVALIDDATA;
592 
593  bound = deflateBound(&s->zstream.zstream, sd->size);
594  if (bound > INT32_MAX - hdr_size)
595  return AVERROR_INVALIDDATA;
596 
597  new_pkt_size = *max_packet_size + bound + hdr_size;
598  if (new_pkt_size < *max_packet_size)
599  return AVERROR_INVALIDDATA;
600  *max_packet_size = new_pkt_size;
601  return 0;
602 }
603 
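/* The worst-case packet size budgets AV_INPUT_BUFFER_MIN_SIZE for the headers
 * plus, per row, the deflate bound and 12 bytes of chunk overhead for every
 * started IOBUF_SIZE block. */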
604 static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
605  const AVFrame *pict, int *got_packet)
606 {
607  PNGEncContext *s = avctx->priv_data;
608  int ret;
609  int enc_row_size;
610  uint64_t max_packet_size;
611 
612  enc_row_size = deflateBound(&s->zstream.zstream,
613  (avctx->width * s->bits_per_pixel + 7) >> 3);
614  max_packet_size =
615  AV_INPUT_BUFFER_MIN_SIZE + // headers
616  avctx->height * (
617  enc_row_size +
618  12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
619  );
620  if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
621  return ret;
622  ret = ff_alloc_packet(avctx, pkt, max_packet_size);
623  if (ret < 0)
624  return ret;
625 
626  s->bytestream_start =
627  s->bytestream = pkt->data;
628  s->bytestream_end = pkt->data + pkt->size;
629 
630  AV_WB64(s->bytestream, PNGSIG);
631  s->bytestream += 8;
632 
633  ret = encode_headers(avctx, pict);
634  if (ret < 0)
635  return ret;
636 
637  ret = encode_frame(avctx, pict);
638  if (ret < 0)
639  return ret;
640 
641  png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);
642 
643  pkt->size = s->bytestream - s->bytestream_start;
644  pkt->flags |= AV_PKT_FLAG_KEY;
645  *got_packet = 1;
646 
647  return 0;
648 }
649 
650 static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input,
651  APNGFctlChunk *fctl_chunk, uint8_t bpp)
652 {
653  // output: background, input: foreground
654  // output the image such that, when blended with the background, it will produce the foreground
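// The function first finds the bounding box of pixels that differ between the
// two frames, then rewrites only that sub-rectangle: verbatim for
// APNG_BLEND_OP_SOURCE, or with unchanged pixels replaced by fully
// transparent ones for APNG_BLEND_OP_OVER.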
655 
656  unsigned int x, y;
657  unsigned int leftmost_x = input->width;
658  unsigned int rightmost_x = 0;
659  unsigned int topmost_y = input->height;
660  unsigned int bottommost_y = 0;
661  const uint8_t *input_data = input->data[0];
662  uint8_t *output_data = output->data[0];
663  ptrdiff_t input_linesize = input->linesize[0];
664  ptrdiff_t output_linesize = output->linesize[0];
665 
666  // Find bounding box of changes
667  for (y = 0; y < input->height; ++y) {
668  for (x = 0; x < input->width; ++x) {
669  if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
670  continue;
671 
672  if (x < leftmost_x)
673  leftmost_x = x;
674  if (x >= rightmost_x)
675  rightmost_x = x + 1;
676  if (y < topmost_y)
677  topmost_y = y;
678  if (y >= bottommost_y)
679  bottommost_y = y + 1;
680  }
681 
682  input_data += input_linesize;
683  output_data += output_linesize;
684  }
685 
686  if (leftmost_x == input->width && rightmost_x == 0) {
687  // Empty frame
688  // APNG does not support empty frames, so we make it a 1x1 frame
689  leftmost_x = topmost_y = 0;
690  rightmost_x = bottommost_y = 1;
691  }
692 
693  // Do actual inverse blending
694  if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
695  output_data = output->data[0];
696  for (y = topmost_y; y < bottommost_y; ++y) {
697  memcpy(output_data,
698  input->data[0] + input_linesize * y + bpp * leftmost_x,
699  bpp * (rightmost_x - leftmost_x));
700  output_data += output_linesize;
701  }
702  } else { // APNG_BLEND_OP_OVER
703  size_t transparent_palette_index;
704  uint32_t *palette;
705 
706  switch (input->format) {
707  case AV_PIX_FMT_RGBA64BE:
708  case AV_PIX_FMT_YA16BE:
709  case AV_PIX_FMT_RGBA:
710  case AV_PIX_FMT_GRAY8A:
711  break;
712 
713  case AV_PIX_FMT_PAL8:
714  palette = (uint32_t*)input->data[1];
715  for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
716  if (palette[transparent_palette_index] >> 24 == 0)
717  break;
718  break;
719 
720  default:
721  // No alpha, so blending not possible
722  return -1;
723  }
724 
725  for (y = topmost_y; y < bottommost_y; ++y) {
726  uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
727  uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
728  output_data = output->data[0] + output_linesize * (y - topmost_y);
729  for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
730  if (!memcmp(foreground, background, bpp)) {
731  if (input->format == AV_PIX_FMT_PAL8) {
732  if (transparent_palette_index == 256) {
733  // Need fully transparent colour, but none exists
734  return -1;
735  }
736 
737  *output_data = transparent_palette_index;
738  } else {
739  memset(output_data, 0, bpp);
740  }
741  continue;
742  }
743 
744  // Check for special alpha values, since full inverse
745  // alpha-on-alpha blending is rarely possible, and when
746  // possible, doesn't compress much better than
747  // APNG_BLEND_OP_SOURCE blending
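// In practice OVER is therefore only kept when the new pixel is fully opaque
// (it simply replaces the background) or the old pixel is fully transparent.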
748  switch (input->format) {
749  case AV_PIX_FMT_RGBA64BE:
750  if (((uint16_t*)foreground)[3] == 0xffff ||
751  ((uint16_t*)background)[3] == 0)
752  break;
753  return -1;
754 
755  case AV_PIX_FMT_YA16BE:
756  if (((uint16_t*)foreground)[1] == 0xffff ||
757  ((uint16_t*)background)[1] == 0)
758  break;
759  return -1;
760 
761  case AV_PIX_FMT_RGBA:
762  if (foreground[3] == 0xff || background[3] == 0)
763  break;
764  return -1;
765 
766  case AV_PIX_FMT_GRAY8A:
767  if (foreground[1] == 0xff || background[1] == 0)
768  break;
769  return -1;
770 
771  case AV_PIX_FMT_PAL8:
772  if (palette[*foreground] >> 24 == 0xff ||
773  palette[*background] >> 24 == 0)
774  break;
775  return -1;
776  }
777 
778  memmove(output_data, foreground, bpp);
779  }
780  }
781  }
782 
783  output->width = rightmost_x - leftmost_x;
784  output->height = bottommost_y - topmost_y;
785  fctl_chunk->width = output->width;
786  fctl_chunk->height = output->height;
787  fctl_chunk->x_offset = leftmost_x;
788  fctl_chunk->y_offset = topmost_y;
789 
790  return 0;
791 }
792 
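/* For frames after the first, try every dispose/blend combination for the
 * previous frame, encode the resulting difference image, and keep whichever
 * candidate bitstream is smallest. */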
793 static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
794  APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
795 {
796  PNGEncContext *s = avctx->priv_data;
797  int ret;
798  unsigned int y;
799  AVFrame* diffFrame;
800  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
801  uint8_t *original_bytestream, *original_bytestream_end;
802  uint8_t *temp_bytestream = 0, *temp_bytestream_end;
803  uint32_t best_sequence_number;
804  uint8_t *best_bytestream;
805  size_t best_bytestream_size = SIZE_MAX;
806  APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
807  APNGFctlChunk fctl_chunk = *best_fctl_chunk;
808 
809  if (avctx->frame_number == 0) {
810  best_fctl_chunk->width = pict->width;
811  best_fctl_chunk->height = pict->height;
812  best_fctl_chunk->x_offset = 0;
813  best_fctl_chunk->y_offset = 0;
814  best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
815  return encode_frame(avctx, pict);
816  }
817 
818  diffFrame = av_frame_alloc();
819  if (!diffFrame)
820  return AVERROR(ENOMEM);
821 
822  diffFrame->format = pict->format;
823  diffFrame->width = pict->width;
824  diffFrame->height = pict->height;
825  if ((ret = av_frame_get_buffer(diffFrame, 0)) < 0)
826  goto fail;
827 
828  original_bytestream = s->bytestream;
829  original_bytestream_end = s->bytestream_end;
830 
831  temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
832  if (!temp_bytestream) {
833  ret = AVERROR(ENOMEM);
834  goto fail;
835  }
836  temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
837 
838  for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
839  // 0: APNG_DISPOSE_OP_NONE
840  // 1: APNG_DISPOSE_OP_BACKGROUND
841  // 2: APNG_DISPOSE_OP_PREVIOUS
842 
843  for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
844  // 0: APNG_BLEND_OP_SOURCE
845  // 1: APNG_BLEND_OP_OVER
846 
847  uint32_t original_sequence_number = s->sequence_number, sequence_number;
848  uint8_t *bytestream_start = s->bytestream;
849  size_t bytestream_size;
850 
851  // Do disposal
852  if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
853  diffFrame->width = pict->width;
854  diffFrame->height = pict->height;
855  ret = av_frame_copy(diffFrame, s->last_frame);
856  if (ret < 0)
857  goto fail;
858 
859  if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
860  for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
861  size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
862  memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
863  }
864  }
865  } else {
866  if (!s->prev_frame)
867  continue;
868 
869  diffFrame->width = pict->width;
870  diffFrame->height = pict->height;
871  ret = av_frame_copy(diffFrame, s->prev_frame);
872  if (ret < 0)
873  goto fail;
874  }
875 
876  // Do inverse blending
877  if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
878  continue;
879 
880  // Do encoding
881  ret = encode_frame(avctx, diffFrame);
882  sequence_number = s->sequence_number;
883  s->sequence_number = original_sequence_number;
884  bytestream_size = s->bytestream - bytestream_start;
885  s->bytestream = bytestream_start;
886  if (ret < 0)
887  goto fail;
888 
889  if (bytestream_size < best_bytestream_size) {
890  *best_fctl_chunk = fctl_chunk;
891  *best_last_fctl_chunk = last_fctl_chunk;
892 
893  best_sequence_number = sequence_number;
894  best_bytestream = s->bytestream;
895  best_bytestream_size = bytestream_size;
896 
897  if (best_bytestream == original_bytestream) {
898  s->bytestream = temp_bytestream;
899  s->bytestream_end = temp_bytestream_end;
900  } else {
901  s->bytestream = original_bytestream;
902  s->bytestream_end = original_bytestream_end;
903  }
904  }
905  }
906  }
907 
908  s->sequence_number = best_sequence_number;
909  s->bytestream = original_bytestream + best_bytestream_size;
910  s->bytestream_end = original_bytestream_end;
911  if (best_bytestream != original_bytestream)
912  memcpy(original_bytestream, best_bytestream, best_bytestream_size);
913 
914  ret = 0;
915 
916 fail:
917  av_freep(&temp_bytestream);
918  av_frame_free(&diffFrame);
919  return ret;
920 }
921 
922 static int encode_apng(AVCodecContext *avctx, AVPacket *pkt,
923  const AVFrame *pict, int *got_packet)
924 {
925  PNGEncContext *s = avctx->priv_data;
926  int ret;
927  int enc_row_size;
928  uint64_t max_packet_size;
929  APNGFctlChunk fctl_chunk = {0};
930 
931  if (pict && s->color_type == PNG_COLOR_TYPE_PALETTE) {
932  uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));
933 
934  if (avctx->frame_number == 0) {
935  s->palette_checksum = checksum;
936  } else if (checksum != s->palette_checksum) {
937  av_log(avctx, AV_LOG_ERROR,
938  "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
939  return -1;
940  }
941  }
942 
943  enc_row_size = deflateBound(&s->zstream.zstream,
944  (avctx->width * s->bits_per_pixel + 7) >> 3);
945  max_packet_size =
946  AV_INPUT_BUFFER_MIN_SIZE + // headers
947  avctx->height * (
948  enc_row_size +
949  (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
950  );
951  if ((ret = add_icc_profile_size(avctx, pict, &max_packet_size)))
952  return ret;
953  if (max_packet_size > INT_MAX)
954  return AVERROR(ENOMEM);
955 
956  if (avctx->frame_number == 0) {
957  if (!pict)
958  return AVERROR(EINVAL);
959 
960  s->bytestream = s->extra_data = av_malloc(AV_INPUT_BUFFER_MIN_SIZE);
961  if (!s->extra_data)
962  return AVERROR(ENOMEM);
963 
964  ret = encode_headers(avctx, pict);
965  if (ret < 0)
966  return ret;
967 
968  s->extra_data_size = s->bytestream - s->extra_data;
969 
970  s->last_frame_packet = av_malloc(max_packet_size);
971  if (!s->last_frame_packet)
972  return AVERROR(ENOMEM);
973  } else if (s->last_frame) {
974  ret = ff_get_encode_buffer(avctx, pkt, s->last_frame_packet_size, 0);
975  if (ret < 0)
976  return ret;
977 
978  memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
979  pkt->pts = pkt->dts = s->last_frame->pts;
980  }
981 
982  if (pict) {
983  s->bytestream_start =
984  s->bytestream = s->last_frame_packet;
985  s->bytestream_end = s->bytestream + max_packet_size;
986 
987  // We're encoding the frame first, so we have to do a bit of shuffling around
988  // to have the image data write to the correct place in the buffer
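// Room for this frame's fcTL chunk (APNG_FCTL_CHUNK_SIZE payload bytes plus
// 12 bytes of chunk overhead) is reserved here; it is only written into the
// packet on a later call, once the final fcTL values are known.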
989  fctl_chunk.sequence_number = s->sequence_number;
990  ++s->sequence_number;
991  s->bytestream += APNG_FCTL_CHUNK_SIZE + 12;
992 
993  ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
994  if (ret < 0)
995  return ret;
996 
997  fctl_chunk.delay_num = 0; // delay filled in during muxing
998  fctl_chunk.delay_den = 0;
999  } else {
1000  s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
1001  }
1002 
1003  if (s->last_frame) {
1004  uint8_t* last_fctl_chunk_start = pkt->data;
1005  uint8_t buf[APNG_FCTL_CHUNK_SIZE];
1006  if (!s->extra_data_updated) {
1007  uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
1008  if (!side_data)
1009  return AVERROR(ENOMEM);
1010  memcpy(side_data, s->extra_data, s->extra_data_size);
1011  s->extra_data_updated = 1;
1012  }
1013 
1014  AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
1015  AV_WB32(buf + 4, s->last_frame_fctl.width);
1016  AV_WB32(buf + 8, s->last_frame_fctl.height);
1017  AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
1018  AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
1019  AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
1020  AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
1021  buf[24] = s->last_frame_fctl.dispose_op;
1022  buf[25] = s->last_frame_fctl.blend_op;
1023  png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, sizeof(buf));
1024 
1025  *got_packet = 1;
1026  }
1027 
1028  if (pict) {
1029  if (!s->last_frame) {
1030  s->last_frame = av_frame_alloc();
1031  if (!s->last_frame)
1032  return AVERROR(ENOMEM);
1033  } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
1034  if (!s->prev_frame) {
1035  s->prev_frame = av_frame_alloc();
1036  if (!s->prev_frame)
1037  return AVERROR(ENOMEM);
1038 
1039  s->prev_frame->format = pict->format;
1040  s->prev_frame->width = pict->width;
1041  s->prev_frame->height = pict->height;
1042  if ((ret = av_frame_get_buffer(s->prev_frame, 0)) < 0)
1043  return ret;
1044  }
1045 
1046  // Do disposal, but not blending
1047  av_frame_copy(s->prev_frame, s->last_frame);
1048  if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
1049  uint32_t y;
1050  uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
1051  for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
1052  size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
1053  memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
1054  }
1055  }
1056  }
1057 
1058  av_frame_unref(s->last_frame);
1059  ret = av_frame_ref(s->last_frame, (AVFrame*)pict);
1060  if (ret < 0)
1061  return ret;
1062 
1063  s->last_frame_fctl = fctl_chunk;
1064  s->last_frame_packet_size = s->bytestream - s->bytestream_start;
1065  } else {
1066  av_frame_free(&s->last_frame);
1067  }
1068 
1069  return 0;
1070 }
1071 
1072 static av_cold int png_enc_init(AVCodecContext *avctx)
1073 {
1074  PNGEncContext *s = avctx->priv_data;
1075  int compression_level;
1076 
1077  switch (avctx->pix_fmt) {
1078  case AV_PIX_FMT_RGBA:
1079  avctx->bits_per_coded_sample = 32;
1080  break;
1081  case AV_PIX_FMT_RGB24:
1082  avctx->bits_per_coded_sample = 24;
1083  break;
1084  case AV_PIX_FMT_GRAY8:
1085  avctx->bits_per_coded_sample = 0x28;
1086  break;
1087  case AV_PIX_FMT_MONOBLACK:
1088  avctx->bits_per_coded_sample = 1;
1089  break;
1090  case AV_PIX_FMT_PAL8:
1091  avctx->bits_per_coded_sample = 8;
1092  }
1093 
1094  ff_llvidencdsp_init(&s->llvidencdsp);
1095 
1096  if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
1097  s->filter_type = PNG_FILTER_VALUE_NONE;
1098 
1099  if (s->dpi && s->dpm) {
1100  av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
1101  return AVERROR(EINVAL);
1102  } else if (s->dpi) {
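/* 1 inch = 0.0254 m, so dots per inch * 10000 / 254 gives dots per metre. */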
1103  s->dpm = s->dpi * 10000 / 254;
1104  }
1105 
1106  s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
1107  switch (avctx->pix_fmt) {
1108  case AV_PIX_FMT_RGBA64BE:
1109  s->bit_depth = 16;
1110  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1111  break;
1112  case AV_PIX_FMT_RGB48BE:
1113  s->bit_depth = 16;
1114  s->color_type = PNG_COLOR_TYPE_RGB;
1115  break;
1116  case AV_PIX_FMT_RGBA:
1117  s->bit_depth = 8;
1118  s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
1119  break;
1120  case AV_PIX_FMT_RGB24:
1121  s->bit_depth = 8;
1122  s->color_type = PNG_COLOR_TYPE_RGB;
1123  break;
1124  case AV_PIX_FMT_GRAY16BE:
1125  s->bit_depth = 16;
1126  s->color_type = PNG_COLOR_TYPE_GRAY;
1127  break;
1128  case AV_PIX_FMT_GRAY8:
1129  s->bit_depth = 8;
1130  s->color_type = PNG_COLOR_TYPE_GRAY;
1131  break;
1132  case AV_PIX_FMT_GRAY8A:
1133  s->bit_depth = 8;
1134  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1135  break;
1136  case AV_PIX_FMT_YA16BE:
1137  s->bit_depth = 16;
1138  s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
1139  break;
1140  case AV_PIX_FMT_MONOBLACK:
1141  s->bit_depth = 1;
1142  s->color_type = PNG_COLOR_TYPE_GRAY;
1143  break;
1144  case AV_PIX_FMT_PAL8:
1145  s->bit_depth = 8;
1146  s->color_type = PNG_COLOR_TYPE_PALETTE;
1147  break;
1148  default:
1149  return -1;
1150  }
1151  s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;
1152 
1153  compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
1154  ? Z_DEFAULT_COMPRESSION
1155  : av_clip(avctx->compression_level, 0, 9);
1156  return ff_deflate_init(&s->zstream, compression_level, avctx);
1157 }
1158 
1159 static av_cold int png_enc_close(AVCodecContext *avctx)
1160 {
1161  PNGEncContext *s = avctx->priv_data;
1162 
1163  ff_deflate_end(&s->zstream);
1164  av_frame_free(&s->last_frame);
1165  av_frame_free(&s->prev_frame);
1166  av_freep(&s->last_frame_packet);
1167  av_freep(&s->extra_data);
1168  s->extra_data_size = 0;
1169  return 0;
1170 }
1171 
1172 #define OFFSET(x) offsetof(PNGEncContext, x)
1173 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
1174 static const AVOption options[] = {
1175  {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1176  {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
1177  { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, "pred" },
1178  { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE }, INT_MIN, INT_MAX, VE, "pred" },
1179  { "sub", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB }, INT_MIN, INT_MAX, VE, "pred" },
1180  { "up", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP }, INT_MIN, INT_MAX, VE, "pred" },
1181  { "avg", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG }, INT_MIN, INT_MAX, VE, "pred" },
1182  { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, "pred" },
1183  { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, "pred" },
1184  { NULL},
1185 };
1186 
1187 static const AVClass pngenc_class = {
1188  .class_name = "(A)PNG encoder",
1189  .item_name = av_default_item_name,
1190  .option = options,
1191  .version = LIBAVUTIL_VERSION_INT,
1192 };
1193 
1194 const FFCodec ff_png_encoder = {
1195  .p.name = "png",
1196  .p.long_name = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
1197  .p.type = AVMEDIA_TYPE_VIDEO,
1198  .p.id = AV_CODEC_ID_PNG,
1199  .priv_data_size = sizeof(PNGEncContext),
1200  .init = png_enc_init,
1201  .close = png_enc_close,
1202  FF_CODEC_ENCODE_CB(encode_png),
1203  .p.capabilities = AV_CODEC_CAP_FRAME_THREADS,
1204  .p.pix_fmts = (const enum AVPixelFormat[]) {
1205  AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1206  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1207  AV_PIX_FMT_PAL8,
1208  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1209  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1210  AV_PIX_FMT_MONOBLACK, AV_PIX_FMT_NONE
1211  },
1212  .p.priv_class = &pngenc_class,
1213  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1214 };
1215 
1216 const FFCodec ff_apng_encoder = {
1217  .p.name = "apng",
1218  .p.long_name = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
1219  .p.type = AVMEDIA_TYPE_VIDEO,
1220  .p.id = AV_CODEC_ID_APNG,
1221  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1222  .priv_data_size = sizeof(PNGEncContext),
1223  .init = png_enc_init,
1224  .close = png_enc_close,
1225  FF_CODEC_ENCODE_CB(encode_apng),
1226  .p.pix_fmts = (const enum AVPixelFormat[]) {
1227  AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
1228  AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
1229  AV_PIX_FMT_PAL8,
1230  AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
1231  AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
1232  AV_PIX_FMT_NONE
1233  },
1234  .p.priv_class = &pngenc_class,
1235  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
1236 };