FFmpeg
libvpxdec.c
/*
 * Copyright (c) 2010, Google, Inc.
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * VP8/9 decoder support via libvpx
 */

#define VPX_CODEC_DISABLE_COMPAT 1
#include <vpx/vpx_decoder.h>
#include <vpx/vpx_frame_buffer.h>
#include <vpx/vp8dx.h>

#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "avcodec.h"
#include "decode.h"
#include "internal.h"
#include "libvpx.h"
#include "profiles.h"

typedef struct VPxDecoderContext {
    struct vpx_codec_ctx decoder;
    struct vpx_codec_ctx decoder_alpha;
    AVBufferPool *pool;
    size_t pool_size;
    int has_alpha_channel;
} VPxContext;

static int get_frame_buffer(void *priv, size_t min_size, vpx_codec_frame_buffer_t *fb)
{
    VPxContext *ctx = priv;
    AVBufferRef *buf;

    if (min_size > ctx->pool_size) {
        av_buffer_pool_uninit(&ctx->pool);
        /* According to the libvpx docs the buffer must be zeroed out. */
        ctx->pool = av_buffer_pool_init(min_size, av_buffer_allocz);
        if (!ctx->pool) {
            ctx->pool_size = 0;
            return AVERROR(ENOMEM);
        }
        ctx->pool_size = min_size;
    }

    buf = av_buffer_pool_get(ctx->pool);
    if (!buf)
        return AVERROR(ENOMEM);

    fb->priv = buf;
    fb->size = ctx->pool_size;
    fb->data = buf->data;

    return 0;
}

static int release_frame_buffer(void *priv, vpx_codec_frame_buffer_t *fb)
{
    AVBufferRef *buf = fb->priv;
    av_buffer_unref(&buf);
    return 0;
}
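
/*
 * Illustrative sketch (not part of libvpxdec.c): the AVBufferPool pattern
 * used by get_frame_buffer()/release_frame_buffer() above, shown standalone.
 * The pool hands out zero-initialized, reference-counted buffers that libvpx
 * then treats as external frame buffers; unreferencing a buffer returns it to
 * the pool for reuse instead of freeing it. The function name
 * pool_usage_sketch and its parameter are hypothetical.
 */
static int pool_usage_sketch(int frame_size)
{
    AVBufferPool *pool = av_buffer_pool_init(frame_size, av_buffer_allocz);
    AVBufferRef *buf;

    if (!pool)
        return AVERROR(ENOMEM);

    buf = av_buffer_pool_get(pool);   /* reuses an idle buffer when available */
    if (!buf) {
        av_buffer_pool_uninit(&pool);
        return AVERROR(ENOMEM);
    }

    av_buffer_unref(&buf);            /* hands the buffer back to the pool */
    av_buffer_pool_uninit(&pool);     /* pool is freed once all refs are gone */
    return 0;
}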

static av_cold int vpx_init(AVCodecContext *avctx,
                            struct vpx_codec_ctx* decoder,
                            const struct vpx_codec_iface *iface)
{
    struct vpx_codec_dec_cfg deccfg = {
        .threads = FFMIN(avctx->thread_count ? avctx->thread_count : av_cpu_count(), 16)
    };

    av_log(avctx, AV_LOG_INFO, "%s\n", vpx_codec_version_str());
    av_log(avctx, AV_LOG_VERBOSE, "%s\n", vpx_codec_build_config());

    if (vpx_codec_dec_init(decoder, iface, &deccfg, 0) != VPX_CODEC_OK) {
        const char *error = vpx_codec_error(decoder);
        av_log(avctx, AV_LOG_ERROR, "Failed to initialize decoder: %s\n",
               error);
        return AVERROR(EINVAL);
    }

    if (avctx->codec_id == AV_CODEC_ID_VP9)
        vpx_codec_set_frame_buffer_functions(decoder, get_frame_buffer, release_frame_buffer, avctx->priv_data);

    return 0;
}
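
/*
 * Usage sketch (not part of libvpxdec.c): opening this wrapper explicitly by
 * name ("libvpx" for VP8, "libvpx-vp9" for VP9) instead of FFmpeg's native
 * decoders. With thread_count left at 0, vpx_init() above picks
 * av_cpu_count() threads, capped at 16. The helper name open_libvpx_decoder
 * is hypothetical.
 */
static AVCodecContext *open_libvpx_decoder(const char *name)
{
    const AVCodec *codec = avcodec_find_decoder_by_name(name);
    AVCodecContext *avctx;

    if (!codec)
        return NULL;
    avctx = avcodec_alloc_context3(codec);
    if (!avctx)
        return NULL;
    avctx->thread_count = 0;                  /* let the wrapper choose */
    if (avcodec_open2(avctx, codec, NULL) < 0) {
        avcodec_free_context(&avctx);
        return NULL;
    }
    return avctx;
}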

// returns 0 on success, AVERROR_INVALIDDATA otherwise
static int set_pix_fmt(AVCodecContext *avctx, struct vpx_image *img,
                       int has_alpha_channel)
{
    static const enum AVColorSpace colorspaces[8] = {
        AVCOL_SPC_UNSPECIFIED, AVCOL_SPC_BT470BG, AVCOL_SPC_BT709, AVCOL_SPC_SMPTE170M,
        AVCOL_SPC_SMPTE240M, AVCOL_SPC_BT2020_NCL, AVCOL_SPC_RESERVED, AVCOL_SPC_RGB,
    };
#if VPX_IMAGE_ABI_VERSION >= 4
    static const enum AVColorRange color_ranges[] = {
        AVCOL_RANGE_MPEG, AVCOL_RANGE_JPEG
    };
    avctx->color_range = color_ranges[img->range];
#endif
    avctx->colorspace = colorspaces[img->cs];
    if (avctx->codec_id == AV_CODEC_ID_VP8 && img->fmt != VPX_IMG_FMT_I420)
        return AVERROR_INVALIDDATA;
    switch (img->fmt) {
    case VPX_IMG_FMT_I420:
        if (avctx->codec_id == AV_CODEC_ID_VP9)
            avctx->profile = FF_PROFILE_VP9_0;
        avctx->pix_fmt =
            has_alpha_channel ? AV_PIX_FMT_YUVA420P : AV_PIX_FMT_YUV420P;
        return 0;
#if CONFIG_LIBVPX_VP9_DECODER
    case VPX_IMG_FMT_I422:
        avctx->profile = FF_PROFILE_VP9_1;
        avctx->pix_fmt = AV_PIX_FMT_YUV422P;
        return 0;
    case VPX_IMG_FMT_I440:
        avctx->profile = FF_PROFILE_VP9_1;
        avctx->pix_fmt = AV_PIX_FMT_YUV440P;
        return 0;
    case VPX_IMG_FMT_I444:
        avctx->profile = FF_PROFILE_VP9_1;
        avctx->pix_fmt = avctx->colorspace == AVCOL_SPC_RGB ?
                         AV_PIX_FMT_GBRP : AV_PIX_FMT_YUV444P;
        return 0;
    case VPX_IMG_FMT_I42016:
        avctx->profile = FF_PROFILE_VP9_2;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
    case VPX_IMG_FMT_I42216:
        avctx->profile = FF_PROFILE_VP9_3;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
    case VPX_IMG_FMT_I44016:
        avctx->profile = FF_PROFILE_VP9_3;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = AV_PIX_FMT_YUV440P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = AV_PIX_FMT_YUV440P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
    case VPX_IMG_FMT_I44416:
        avctx->profile = FF_PROFILE_VP9_3;
        if (img->bit_depth == 10) {
            avctx->pix_fmt = avctx->colorspace == AVCOL_SPC_RGB ?
                             AV_PIX_FMT_GBRP10 : AV_PIX_FMT_YUV444P10;
            return 0;
        } else if (img->bit_depth == 12) {
            avctx->pix_fmt = avctx->colorspace == AVCOL_SPC_RGB ?
                             AV_PIX_FMT_GBRP12 : AV_PIX_FMT_YUV444P12;
            return 0;
        } else {
            return AVERROR_INVALIDDATA;
        }
#endif
    default:
        return AVERROR_INVALIDDATA;
    }
}

static int decode_frame(AVCodecContext *avctx, vpx_codec_ctx_t *decoder,
                        uint8_t *data, uint32_t data_sz)
{
    if (vpx_codec_decode(decoder, data, data_sz, NULL, 0) != VPX_CODEC_OK) {
        const char *error  = vpx_codec_error(decoder);
        const char *detail = vpx_codec_error_detail(decoder);

        av_log(avctx, AV_LOG_ERROR, "Failed to decode frame: %s\n", error);
        if (detail) {
            av_log(avctx, AV_LOG_ERROR, "  Additional information: %s\n",
                   detail);
        }
        return AVERROR_INVALIDDATA;
    }
    return 0;
}

static int vpx_decode(AVCodecContext *avctx,
                      void *data, int *got_frame, AVPacket *avpkt)
{
    VPxContext *ctx = avctx->priv_data;
    AVFrame *picture = data;
    const void *iter = NULL;
    const void *iter_alpha = NULL;
    struct vpx_image *img, *img_alpha;
    int ret;
    uint8_t *side_data = NULL;
    int side_data_size;

    ret = decode_frame(avctx, &ctx->decoder, avpkt->data, avpkt->size);
    if (ret)
        return ret;

    side_data = av_packet_get_side_data(avpkt,
                                        AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL,
                                        &side_data_size);
    if (side_data_size >= 8) {
        const uint64_t additional_id = AV_RB64(side_data);
        side_data += 8;
        side_data_size -= 8;
        if (additional_id == 1) {  // 1 stands for alpha channel data.
            if (!ctx->has_alpha_channel) {
                ctx->has_alpha_channel = 1;
                ret = vpx_init(avctx,
                               &ctx->decoder_alpha,
#if CONFIG_LIBVPX_VP8_DECODER && CONFIG_LIBVPX_VP9_DECODER
                               (avctx->codec_id == AV_CODEC_ID_VP8) ?
                               &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo
#elif CONFIG_LIBVPX_VP8_DECODER
                               &vpx_codec_vp8_dx_algo
#else
                               &vpx_codec_vp9_dx_algo
#endif
                               );
                if (ret)
                    return ret;
            }
            ret = decode_frame(avctx, &ctx->decoder_alpha, side_data,
                               side_data_size);
            if (ret)
                return ret;
        }
    }

    if ((img = vpx_codec_get_frame(&ctx->decoder, &iter)) &&
        (!ctx->has_alpha_channel ||
         (img_alpha = vpx_codec_get_frame(&ctx->decoder_alpha, &iter_alpha)))) {
        uint8_t *planes[4];
        int linesizes[4];

        if (img->d_w > img->w || img->d_h > img->h) {
            av_log(avctx, AV_LOG_ERROR, "Display dimensions %dx%d exceed storage %dx%d\n",
                   img->d_w, img->d_h, img->w, img->h);
            return AVERROR_EXTERNAL;
        }

        if ((ret = set_pix_fmt(avctx, img, ctx->has_alpha_channel)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported output colorspace (%d) / bit_depth (%d)\n",
                   img->fmt, img->bit_depth);
            return ret;
        }

        if ((int) img->d_w != avctx->width || (int) img->d_h != avctx->height) {
            av_log(avctx, AV_LOG_INFO, "dimension change! %dx%d -> %dx%d\n",
                   avctx->width, avctx->height, img->d_w, img->d_h);
            ret = ff_set_dimensions(avctx, img->d_w, img->d_h);
            if (ret < 0)
                return ret;
        }

        if (ctx->has_alpha_channel &&
            (img->d_w != img_alpha->d_w ||
             img->d_h != img_alpha->d_h ||
             img->bit_depth != img_alpha->bit_depth)) {
            av_log(avctx, AV_LOG_ERROR,
                   "Video dimensions %dx%d@%dbpc differ from alpha dimensions %dx%d@%dbpc\n",
                   img->d_w, img->d_h, img->bit_depth,
                   img_alpha->d_w, img_alpha->d_h, img_alpha->bit_depth);
            return AVERROR_INVALIDDATA;
        }

        planes[0] = img->planes[VPX_PLANE_Y];
        planes[1] = img->planes[VPX_PLANE_U];
        planes[2] = img->planes[VPX_PLANE_V];
        planes[3] =
            ctx->has_alpha_channel ? img_alpha->planes[VPX_PLANE_Y] : NULL;
        linesizes[0] = img->stride[VPX_PLANE_Y];
        linesizes[1] = img->stride[VPX_PLANE_U];
        linesizes[2] = img->stride[VPX_PLANE_V];
        linesizes[3] =
            ctx->has_alpha_channel ? img_alpha->stride[VPX_PLANE_Y] : 0;

        if (img->fb_priv && (!ctx->has_alpha_channel || img_alpha->fb_priv)) {
            ret = ff_decode_frame_props(avctx, picture);
            if (ret < 0)
                return ret;
            picture->buf[0] = av_buffer_ref(img->fb_priv);
            if (!picture->buf[0])
                return AVERROR(ENOMEM);
            if (ctx->has_alpha_channel) {
                picture->buf[1] = av_buffer_ref(img_alpha->fb_priv);
                if (!picture->buf[1]) {
                    av_frame_unref(picture);
                    return AVERROR(ENOMEM);
                }
            }
            for (int i = 0; i < 4; i++) {
                picture->data[i]     = planes[i];
                picture->linesize[i] = linesizes[i];
            }
        } else {
            if ((ret = ff_get_buffer(avctx, picture, 0)) < 0)
                return ret;
            av_image_copy(picture->data, picture->linesize, (const uint8_t**)planes,
                          linesizes, avctx->pix_fmt, img->d_w, img->d_h);
        }
        *got_frame = 1;
    }
    return avpkt->size;
}
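
/*
 * Caller-side sketch (not part of libvpxdec.c): feeding packets to this
 * decoder through the public send/receive API. libavcodec's generic code
 * drives the .decode callback (vpx_decode above) underneath, and any side
 * data carried on the packet, such as Matroska BlockAdditional alpha data,
 * travels with it. The function name decode_packet_sketch is hypothetical.
 */
static int decode_packet_sketch(AVCodecContext *avctx, const AVPacket *pkt,
                                AVFrame *frame)
{
    int ret = avcodec_send_packet(avctx, pkt);
    if (ret < 0)
        return ret;

    while (ret >= 0) {
        ret = avcodec_receive_frame(avctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                     /* need more input / end of stream */
        if (ret < 0)
            return ret;                   /* decoding error */
        /* ... consume frame->data / frame->linesize here ... */
        av_frame_unref(frame);
    }
    return 0;
}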

static av_cold int vpx_free(AVCodecContext *avctx)
{
    VPxContext *ctx = avctx->priv_data;
    vpx_codec_destroy(&ctx->decoder);
    if (ctx->has_alpha_channel)
        vpx_codec_destroy(&ctx->decoder_alpha);
    av_buffer_pool_uninit(&ctx->pool);
    return 0;
}

#if CONFIG_LIBVPX_VP8_DECODER
static av_cold int vp8_init(AVCodecContext *avctx)
{
    VPxContext *ctx = avctx->priv_data;
    return vpx_init(avctx, &ctx->decoder, &vpx_codec_vp8_dx_algo);
}

AVCodec ff_libvpx_vp8_decoder = {
    .name           = "libvpx",
    .long_name      = NULL_IF_CONFIG_SMALL("libvpx VP8"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_VP8,
    .priv_data_size = sizeof(VPxContext),
    .init           = vp8_init,
    .close          = vpx_free,
    .decode         = vpx_decode,
    .capabilities   = AV_CODEC_CAP_AUTO_THREADS | AV_CODEC_CAP_DR1,
    .wrapper_name   = "libvpx",
};
#endif /* CONFIG_LIBVPX_VP8_DECODER */

#if CONFIG_LIBVPX_VP9_DECODER
static av_cold int vp9_init(AVCodecContext *avctx)
{
    VPxContext *ctx = avctx->priv_data;
    return vpx_init(avctx, &ctx->decoder, &vpx_codec_vp9_dx_algo);
}

AVCodec ff_libvpx_vp9_decoder = {
    .name           = "libvpx-vp9",
    .long_name      = NULL_IF_CONFIG_SMALL("libvpx VP9"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_VP9,
    .priv_data_size = sizeof(VPxContext),
    .init           = vp9_init,
    .close          = vpx_free,
    .decode         = vpx_decode,
    .capabilities   = AV_CODEC_CAP_AUTO_THREADS,
    .init_static_data = ff_vp9_init_static,
    .profiles       = NULL_IF_CONFIG_SMALL(ff_vp9_profiles),
    .wrapper_name   = "libvpx",
};
#endif /* CONFIG_LIBVPX_VP9_DECODER */