FFmpeg
hwcontext_vdpau.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include <stdint.h>
22 #include <string.h>
23 
24 #include <vdpau/vdpau.h>
25 
26 #include "buffer.h"
27 #include "common.h"
28 #include "hwcontext.h"
29 #include "hwcontext_internal.h"
30 #include "hwcontext_vdpau.h"
31 #include "mem.h"
32 #include "pixfmt.h"
33 #include "pixdesc.h"
34 
35 typedef struct VDPAUPixFmtMap {
36  VdpYCbCrFormat vdpau_fmt;
39 
40 static const VDPAUPixFmtMap pix_fmts_420[] = {
41  { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12 },
42  { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
43 #ifdef VDP_YCBCR_FORMAT_P016
44  { VDP_YCBCR_FORMAT_P016, AV_PIX_FMT_P016 },
45  { VDP_YCBCR_FORMAT_P010, AV_PIX_FMT_P010 },
46 #endif
47  { 0, AV_PIX_FMT_NONE, },
48 };
49 
50 static const VDPAUPixFmtMap pix_fmts_422[] = {
51  { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16 },
52  { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
53  { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
54  { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
55  { 0, AV_PIX_FMT_NONE, },
56 };
57 
58 static const VDPAUPixFmtMap pix_fmts_444[] = {
59 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
60  { VDP_YCBCR_FORMAT_Y_U_V_444, AV_PIX_FMT_YUV444P },
61 #endif
62 #ifdef VDP_YCBCR_FORMAT_P016
63  {VDP_YCBCR_FORMAT_Y_U_V_444_16, AV_PIX_FMT_YUV444P16},
64 #endif
65  { 0, AV_PIX_FMT_NONE, },
66 };
67 
68 static const struct {
69  VdpChromaType chroma_type;
72 } vdpau_pix_fmts[] = {
73  { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
74  { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
75  { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
76 #ifdef VDP_YCBCR_FORMAT_P016
77  { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P10, pix_fmts_420 },
78  { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P12, pix_fmts_420 },
79  { VDP_CHROMA_TYPE_422_16, AV_PIX_FMT_YUV422P10, pix_fmts_422 },
80  { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P10, pix_fmts_444 },
81  { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P12, pix_fmts_444 },
82 #endif
83 };
84 
85 typedef struct VDPAUDeviceContext {
86  VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
87  VdpVideoSurfaceGetBitsYCbCr *get_data;
88  VdpVideoSurfacePutBitsYCbCr *put_data;
89  VdpVideoSurfaceCreate *surf_create;
90  VdpVideoSurfaceDestroy *surf_destroy;
91 
95 
96 typedef struct VDPAUFramesContext {
97  VdpVideoSurfaceGetBitsYCbCr *get_data;
98  VdpVideoSurfacePutBitsYCbCr *put_data;
99  VdpChromaType chroma_type;
101 
102  const enum AVPixelFormat *pix_fmts;
105 
106 static int count_pixfmts(const VDPAUPixFmtMap *map)
107 {
108  int count = 0;
109  while (map->pix_fmt != AV_PIX_FMT_NONE) {
110  map++;
111  count++;
112  }
113  return count;
114 }
115 
117 {
118  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
119  VDPAUDeviceContext *priv = ctx->internal->priv;
120  int i;
121 
122  for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
123  const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
124  int nb_pix_fmts;
125 
126  nb_pix_fmts = count_pixfmts(map);
127  priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
128  if (!priv->pix_fmts[i])
129  return AVERROR(ENOMEM);
130 
131  nb_pix_fmts = 0;
132  while (map->pix_fmt != AV_PIX_FMT_NONE) {
133  VdpBool supported;
134  VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
135  map->vdpau_fmt, &supported);
136  if (err == VDP_STATUS_OK && supported)
137  priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
138  map++;
139  }
140  priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
141  priv->nb_pix_fmts[i] = nb_pix_fmts;
142  }
143 
144  return 0;
145 }
146 
/* Resolve a VDPAU entry point via hwctx->get_proc_address into `result`.
 * Expects `ctx`, `hwctx` and a VdpStatus `err` in the enclosing scope;
 * causes the enclosing function to return AVERROR_UNKNOWN on failure. */
#define GET_CALLBACK(id, result)                                                \
do {                                                                            \
    void *tmp;                                                                  \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \
    if (err != VDP_STATUS_OK) {                                                 \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \
        return AVERROR_UNKNOWN;                                                 \
    }                                                                           \
    result = tmp;                                                               \
} while (0)
157 
159 {
160  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
161  VDPAUDeviceContext *priv = ctx->internal->priv;
162  VdpStatus err;
163  int ret;
164 
165  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
166  priv->get_transfer_caps);
167  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
168  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
169  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, priv->surf_create);
170  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, priv->surf_destroy);
171 
173  if (ret < 0) {
174  av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
175  return ret;
176  }
177 
178  return 0;
179 }
180 
182 {
183  VDPAUDeviceContext *priv = ctx->internal->priv;
184  int i;
185 
186  for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
187  av_freep(&priv->pix_fmts[i]);
188 }
189 
191  const void *hwconfig,
192  AVHWFramesConstraints *constraints)
193 {
194  VDPAUDeviceContext *priv = ctx->internal->priv;
195  int nb_sw_formats = 0;
196  int i;
197 
199  sizeof(*constraints->valid_sw_formats));
200  if (!constraints->valid_sw_formats)
201  return AVERROR(ENOMEM);
202 
203  for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
204  if (priv->nb_pix_fmts[i] > 1)
205  constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
206  }
207  constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;
208 
209  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
210  if (!constraints->valid_hw_formats)
211  return AVERROR(ENOMEM);
212 
213  constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
214  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
215 
216  return 0;
217 }
218 
219 static void vdpau_buffer_free(void *opaque, uint8_t *data)
220 {
221  AVHWFramesContext *ctx = opaque;
222  VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
223  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)data;
224 
225  device_priv->surf_destroy(surf);
226 }
227 
228 static AVBufferRef *vdpau_pool_alloc(void *opaque, size_t size)
229 {
230  AVHWFramesContext *ctx = opaque;
231  VDPAUFramesContext *priv = ctx->internal->priv;
232  AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
233  VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
234 
235  AVBufferRef *ret;
236  VdpVideoSurface surf;
237  VdpStatus err;
238 
239  err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
240  ctx->width, ctx->height, &surf);
241  if (err != VDP_STATUS_OK) {
242  av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
243  return NULL;
244  }
245 
246  ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
248  if (!ret) {
249  device_priv->surf_destroy(surf);
250  return NULL;
251  }
252 
253  return ret;
254 }
255 
257 {
258  VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
259  VDPAUFramesContext *priv = ctx->internal->priv;
260 
261  int i;
262 
263  for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
264  if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
265  priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
266  priv->chroma_idx = i;
267  priv->pix_fmts = device_priv->pix_fmts[i];
268  priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
269  break;
270  }
271  }
272  if (priv->nb_pix_fmts < 2) {
273  av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
274  av_get_pix_fmt_name(ctx->sw_format));
275  return AVERROR(ENOSYS);
276  }
277 
278  if (!ctx->pool) {
279  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
281  if (!ctx->internal->pool_internal)
282  return AVERROR(ENOMEM);
283  }
284 
285  priv->get_data = device_priv->get_data;
286  priv->put_data = device_priv->put_data;
287 
288  return 0;
289 }
290 
292 {
293  frame->buf[0] = av_buffer_pool_get(ctx->pool);
294  if (!frame->buf[0])
295  return AVERROR(ENOMEM);
296 
297  frame->data[3] = frame->buf[0]->data;
298  frame->format = AV_PIX_FMT_VDPAU;
299  frame->width = ctx->width;
300  frame->height = ctx->height;
301 
302  return 0;
303 }
304 
307  enum AVPixelFormat **formats)
308 {
309  VDPAUFramesContext *priv = ctx->internal->priv;
310 
311  enum AVPixelFormat *fmts;
312 
313  if (priv->nb_pix_fmts == 1) {
315  "No target formats are supported for this chroma type\n");
316  return AVERROR(ENOSYS);
317  }
318 
319  fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
320  if (!fmts)
321  return AVERROR(ENOMEM);
322 
323  memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
324  *formats = fmts;
325 
326  return 0;
327 }
328 
330  const AVFrame *src)
331 {
332  VDPAUFramesContext *priv = ctx->internal->priv;
333  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];
334 
335  void *data[3];
336  uint32_t linesize[3];
337 
338  const VDPAUPixFmtMap *map;
339  VdpYCbCrFormat vdpau_format;
340  VdpStatus err;
341  int i;
342 
343  for (i = 0; i< FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
344  data[i] = dst->data[i];
345  if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
347  "The linesize %d cannot be represented as uint32\n",
348  dst->linesize[i]);
349  return AVERROR(ERANGE);
350  }
351  linesize[i] = dst->linesize[i];
352  }
353 
354  map = vdpau_pix_fmts[priv->chroma_idx].map;
355  for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
356  if (map[i].pix_fmt == dst->format) {
357  vdpau_format = map[i].vdpau_fmt;
358  break;
359  }
360  }
361  if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
363  "Unsupported target pixel format: %s\n",
365  return AVERROR(EINVAL);
366  }
367 
368  if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
369 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
370  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
371 #endif
372 #ifdef VDP_YCBCR_FORMAT_P016
373  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
374 #endif
375  )
376  FFSWAP(void*, data[1], data[2]);
377 
378  err = priv->get_data(surf, vdpau_format, data, linesize);
379  if (err != VDP_STATUS_OK) {
380  av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
381  return AVERROR_UNKNOWN;
382  }
383 
384  return 0;
385 }
386 
388  const AVFrame *src)
389 {
390  VDPAUFramesContext *priv = ctx->internal->priv;
391  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];
392 
393  const void *data[3];
394  uint32_t linesize[3];
395 
396  const VDPAUPixFmtMap *map;
397  VdpYCbCrFormat vdpau_format;
398  VdpStatus err;
399  int i;
400 
401  for (i = 0; i< FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
402  data[i] = src->data[i];
403  if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
405  "The linesize %d cannot be represented as uint32\n",
406  src->linesize[i]);
407  return AVERROR(ERANGE);
408  }
409  linesize[i] = src->linesize[i];
410  }
411 
412  map = vdpau_pix_fmts[priv->chroma_idx].map;
413  for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
414  if (map[i].pix_fmt == src->format) {
415  vdpau_format = map[i].vdpau_fmt;
416  break;
417  }
418  }
419  if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
421  "Unsupported source pixel format: %s\n",
422  av_get_pix_fmt_name(src->format));
423  return AVERROR(EINVAL);
424  }
425 
426  if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
427 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
428  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
429 #endif
430  )
431  FFSWAP(const void*, data[1], data[2]);
432 
433  err = priv->put_data(surf, vdpau_format, data, linesize);
434  if (err != VDP_STATUS_OK) {
435  av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
436  return AVERROR_UNKNOWN;
437  }
438 
439  return 0;
440 }
441 
442 #if HAVE_VDPAU_X11
443 #include <vdpau/vdpau_x11.h>
444 #include <X11/Xlib.h>
445 
446 typedef struct VDPAUDevicePriv {
447  VdpDeviceDestroy *device_destroy;
448  Display *dpy;
449 } VDPAUDevicePriv;
450 
451 static void vdpau_device_free(AVHWDeviceContext *ctx)
452 {
453  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
454  VDPAUDevicePriv *priv = ctx->user_opaque;
455 
456  if (priv->device_destroy)
457  priv->device_destroy(hwctx->device);
458  if (priv->dpy)
459  XCloseDisplay(priv->dpy);
460  av_freep(&priv);
461 }
462 
463 static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
464  AVDictionary *opts, int flags)
465 {
466  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
467 
468  VDPAUDevicePriv *priv;
469  VdpStatus err;
470  VdpGetInformationString *get_information_string;
471  const char *display, *vendor;
472 
473  priv = av_mallocz(sizeof(*priv));
474  if (!priv)
475  return AVERROR(ENOMEM);
476 
477  ctx->user_opaque = priv;
478  ctx->free = vdpau_device_free;
479 
480  priv->dpy = XOpenDisplay(device);
481  if (!priv->dpy) {
482  av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
483  XDisplayName(device));
484  return AVERROR_UNKNOWN;
485  }
486  display = XDisplayString(priv->dpy);
487 
488  err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
489  &hwctx->device, &hwctx->get_proc_address);
490  if (err != VDP_STATUS_OK) {
491  av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
492  display);
493  return AVERROR_UNKNOWN;
494  }
495 
496  GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
497  GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);
498 
499  get_information_string(&vendor);
500  av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
501  "X11 display %s\n", vendor, display);
502 
503  return 0;
504 }
505 #endif
506 
509  .name = "VDPAU",
510 
511  .device_hwctx_size = sizeof(AVVDPAUDeviceContext),
512  .device_priv_size = sizeof(VDPAUDeviceContext),
513  .frames_priv_size = sizeof(VDPAUFramesContext),
514 
515 #if HAVE_VDPAU_X11
516  .device_create = vdpau_device_create,
517 #endif
519  .device_uninit = vdpau_device_uninit,
520  .frames_get_constraints = vdpau_frames_get_constraints,
521  .frames_init = vdpau_frames_init,
522  .frames_get_buffer = vdpau_get_buffer,
523  .transfer_get_formats = vdpau_transfer_get_formats,
524  .transfer_data_to = vdpau_transfer_data_to,
525  .transfer_data_from = vdpau_transfer_data_from,
526 
528 };
formats
formats
Definition: signature.h:48
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
VDPAUFramesContext
Definition: hwcontext_vdpau.c:96
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
vdpau_frames_init
static int vdpau_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_vdpau.c:256
VDPAUPixFmtMap::pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_vdpau.c:37
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
pixdesc.h
VDPAUPixFmtMap::vdpau_fmt
VdpYCbCrFormat vdpau_fmt
Definition: hwcontext_vdpau.c:36
VDPAUFramesContext::pix_fmts
enum AVPixelFormat * pix_fmts
Definition: hwcontext_vdpau.c:102
data
const char data[16]
Definition: mxf.c:143
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:406
AVVDPAUDeviceContext::get_proc_address
VdpGetProcAddress * get_proc_address
Definition: hwcontext_vdpau.h:37
VDPAUDeviceContext::pix_fmts
enum AVPixelFormat * pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
Definition: hwcontext_vdpau.c:92
AVVDPAUDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_vdpau.h:35
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVDictionary
Definition: dict.c:30
chroma_type
VdpChromaType chroma_type
Definition: hwcontext_vdpau.c:69
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
VDPAUFramesContext::chroma_idx
int chroma_idx
Definition: hwcontext_vdpau.c:100
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
vdpau_buffer_free
static void vdpau_buffer_free(void *opaque, uint8_t *data)
Definition: hwcontext_vdpau.c:219
pix_fmts_422
static const VDPAUPixFmtMap pix_fmts_422[]
Definition: hwcontext_vdpau.c:50
VDPAUDeviceContext
Definition: hwcontext_vdpau.c:85
VDPAUDeviceContext::get_data
VdpVideoSurfaceGetBitsYCbCr * get_data
Definition: hwcontext_vdpau.c:87
VDPAUPixFmtMap
Definition: hwcontext_vdpau.c:35
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
VDPAUDeviceContext::get_transfer_caps
VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities * get_transfer_caps
Definition: hwcontext_vdpau.c:86
count_pixfmts
static int count_pixfmts(const VDPAUPixFmtMap *map)
Definition: hwcontext_vdpau.c:106
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:409
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
device_init
static int device_init(AVFormatContext *ctx, int *width, int *height, uint32_t pixelformat)
Definition: v4l2.c:203
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:387
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:419
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:296
ctx
AVFormatContext * ctx
Definition: movenc.c:48
vdpau_pool_alloc
static AVBufferRef * vdpau_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_vdpau.c:228
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:41
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
frames_sw_format
enum AVPixelFormat frames_sw_format
Definition: hwcontext_vdpau.c:70
pix_fmts_420
static const VDPAUPixFmtMap pix_fmts_420[]
Definition: hwcontext_vdpau.c:40
opts
AVDictionary * opts
Definition: movenc.c:50
NULL
#define NULL
Definition: coverity.c:32
vdpau_frames_get_constraints
static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_vdpau.c:190
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
ff_hwcontext_type_vdpau
const HWContextType ff_hwcontext_type_vdpau
Definition: hwcontext_vdpau.c:507
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:407
vdpau_pix_fmts
static const struct @314 vdpau_pix_fmts[]
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
VDPAUDeviceContext::surf_destroy
VdpVideoSurfaceDestroy * surf_destroy
Definition: hwcontext_vdpau.c:90
vdpau_device_uninit
static void vdpau_device_uninit(AVHWDeviceContext *ctx)
Definition: hwcontext_vdpau.c:181
size
int size
Definition: twinvq_data.h:10344
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:413
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:412
VDPAUFramesContext::nb_pix_fmts
int nb_pix_fmts
Definition: hwcontext_vdpau.c:103
AV_PIX_FMT_NV16
@ AV_PIX_FMT_NV16
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:191
buffer.h
VDPAUFramesContext::put_data
VdpVideoSurfacePutBitsYCbCr * put_data
Definition: hwcontext_vdpau.c:98
VDPAUFramesContext::chroma_type
VdpChromaType chroma_type
Definition: hwcontext_vdpau.c:99
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:187
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
common.h
vdpau_transfer_data_to
static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_vdpau.c:387
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:264
AV_HWDEVICE_TYPE_VDPAU
@ AV_HWDEVICE_TYPE_VDPAU
Definition: hwcontext.h:29
vdpau_transfer_get_formats
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_vdpau.c:305
hwcontext_vdpau.h
AV_PIX_FMT_P016
#define AV_PIX_FMT_P016
Definition: pixfmt.h:456
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:415
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
ret
ret
Definition: filter_design.txt:187
pix_fmts_444
static const VDPAUPixFmtMap pix_fmts_444[]
Definition: hwcontext_vdpau.c:58
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:410
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:81
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
vdpau_init_pixmfts
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
Definition: hwcontext_vdpau.c:116
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:71
vdpau_transfer_data_from
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_vdpau.c:329
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:455
VDPAUFramesContext::get_data
VdpVideoSurfaceGetBitsYCbCr * get_data
Definition: hwcontext_vdpau.c:97
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:70
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
vdpau_device_init
static int vdpau_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_vdpau.c:158
hwcontext_internal.h
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:71
VDPAUDeviceContext::nb_pix_fmts
int nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
Definition: hwcontext_vdpau.c:93
vdpau_get_buffer
static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_vdpau.c:291
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
VDPAUDeviceContext::surf_create
VdpVideoSurfaceCreate * surf_create
Definition: hwcontext_vdpau.c:89
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
hwcontext.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:370
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
AVVDPAUDeviceContext::device
VdpDevice device
Definition: hwcontext_vdpau.h:36
VDPAUDeviceContext::put_data
VdpVideoSurfacePutBitsYCbCr * put_data
Definition: hwcontext_vdpau.c:88
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2582
GET_CALLBACK
#define GET_CALLBACK(id, result)
Definition: hwcontext_vdpau.c:147