hwcontext_vdpau.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
    VdpVideoSurfacePutBitsYCbCr                     *put_data;
    VdpVideoSurfaceCreate                           *surf_create;
    VdpVideoSurfaceDestroy                          *surf_destroy;

    enum AVPixelFormat *pix_fmts[3];
    int              nb_pix_fmts[3];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType chroma_type;
    int chroma_idx;

    const enum AVPixelFormat *pix_fmts;
    int                       nb_pix_fmts;
} VDPAUFramesContext;

typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat     vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const struct {
    VdpChromaType chroma_type;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, pix_fmts_444 },
};

static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}

static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext    *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i]             = nb_pix_fmts;
    }

    return 0;
}

static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    VdpStatus             err;
    int                   ret;

#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    priv->result = tmp;                                                     \
} while (0)

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}
#undef GET_CALLBACK

static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext          *ctx = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface            surf = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext             *ctx = opaque;
    VDPAUFramesContext           *priv = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext    *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext        *priv = ctx->internal->priv;

    int i;

    switch (ctx->sw_format) {
    case AV_PIX_FMT_YUV420P: priv->chroma_type = VDP_CHROMA_TYPE_420; break;
    case AV_PIX_FMT_YUV422P: priv->chroma_type = VDP_CHROMA_TYPE_422; break;
    case AV_PIX_FMT_YUV444P: priv->chroma_type = VDP_CHROMA_TYPE_444; break;
    default:
        av_log(ctx, AV_LOG_ERROR, "Unsupported data layout: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].chroma_type == priv->chroma_type) {
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (!priv->pix_fmts) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported chroma type: %d\n", priv->chroma_type);
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}

static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VDPAU;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv = ctx->internal->priv;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
    *formats = fmts;

    return 0;
}

static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || (uint64_t)dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    /* VDPAU's YV12 stores the Cr plane before Cb, so swap the chroma planes */
    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || (uint64_t)src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    /* VDPAU's YV12 stores the Cr plane before Cb, so swap the chroma planes */
    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display          *dpy;
} VDPAUDevicePriv;

static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv       *priv = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    result = tmp;                                                           \
} while (0)

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif

const HWContextType ff_hwcontext_type_vdpau = {
    .type                 = AV_HWDEVICE_TYPE_VDPAU,
    .name                 = "VDPAU",

    .device_hwctx_size    = sizeof(AVVDPAUDeviceContext),
    .device_priv_size     = sizeof(VDPAUDeviceContext),
    .frames_priv_size     = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create        = vdpau_device_create,
#endif
    .device_init          = vdpau_device_init,
    .device_uninit        = vdpau_device_uninit,
    .frames_init          = vdpau_frames_init,
    .frames_get_buffer    = vdpau_get_buffer,
    .transfer_get_formats = vdpau_transfer_get_formats,
    .transfer_data_to     = vdpau_transfer_data_to,
    .transfer_data_from   = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};
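
For context, a minimal caller-side sketch (not part of this file) of how this backend is reached through the public hwcontext API: av_hwdevice_ctx_create() ends up in vdpau_device_create()/vdpau_device_init(), av_hwframe_ctx_init() in vdpau_frames_init(), av_hwframe_get_buffer() in vdpau_get_buffer(), and av_hwframe_transfer_data() in the transfer callbacks registered above. The function name, the 1280x720 dimensions and the YUV420P software format below are arbitrary example values, not anything defined by this file.

#include <libavutil/buffer.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixfmt.h>

/* Hypothetical example: create a VDPAU device, a surface pool, and upload one frame. */
int upload_one_vdpau_frame(void)
{
    AVBufferRef *device_ref = NULL, *frames_ref = NULL;
    AVFrame *sw = av_frame_alloc(), *hw = av_frame_alloc();
    int ret;

    if (!sw || !hw) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    /* Open the VDPAU device; a NULL device string uses the default X11 display. */
    ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU, NULL, NULL, 0);
    if (ret < 0)
        goto fail;

    /* Describe the surface pool; the sw_format is mapped to a VdpChromaType above. */
    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    {
        AVHWFramesContext *fc = (AVHWFramesContext*)frames_ref->data;
        fc->format    = AV_PIX_FMT_VDPAU;
        fc->sw_format = AV_PIX_FMT_YUV420P;
        fc->width     = 1280;
        fc->height    = 720;
    }
    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0)
        goto fail;

    /* Allocate a software frame and a VdpVideoSurface-backed frame, then upload. */
    sw->format = AV_PIX_FMT_YUV420P;
    sw->width  = 1280;
    sw->height = 720;
    ret = av_frame_get_buffer(sw, 0);
    if (ret < 0)
        goto fail;

    ret = av_hwframe_get_buffer(frames_ref, hw, 0);
    if (ret < 0)
        goto fail;
    ret = av_hwframe_transfer_data(hw, sw, 0);

fail:
    av_frame_free(&sw);
    av_frame_free(&hw);
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&device_ref);
    return ret;
}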