FFmpeg
hwcontext_qsv.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdint.h>
20 #include <string.h>
21 
22 #include <mfx/mfxvideo.h>
23 
24 #include "config.h"
25 
26 #if HAVE_PTHREADS
27 #include <pthread.h>
28 #endif
29 
30 #define COBJMACROS
31 #if CONFIG_VAAPI
32 #include "hwcontext_vaapi.h"
33 #endif
34 #if CONFIG_D3D11VA
35 #include "hwcontext_d3d11va.h"
36 #endif
37 #if CONFIG_DXVA2
38 #include "hwcontext_dxva2.h"
39 #endif
40 
41 #include "buffer.h"
42 #include "common.h"
43 #include "hwcontext.h"
44 #include "hwcontext_internal.h"
45 #include "hwcontext_qsv.h"
46 #include "mem.h"
47 #include "pixfmt.h"
48 #include "pixdesc.h"
49 #include "time.h"
50 
51 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
52  (MFX_VERSION_MAJOR > (MAJOR) || \
53  MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
54 
55 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
56 
57 typedef struct QSVDevicePriv {
58  AVBufferRef *child_device_ctx;
59 } QSVDevicePriv;
60 
61 typedef struct QSVDeviceContext {
62  mfxHDL handle;
63  mfxHandleType handle_type;
64  mfxVersion ver;
65  mfxIMPL impl;
66 
67  enum AVHWDeviceType child_device_type;
68  enum AVPixelFormat child_pix_fmt;
69 } QSVDeviceContext;
70 
71 typedef struct QSVFramesContext {
72  mfxSession session_download;
73  int session_download_init;
74  mfxSession session_upload;
75  int session_upload_init;
76 #if HAVE_PTHREADS
77  pthread_mutex_t session_lock;
78  pthread_cond_t session_cond;
79 #endif
80 
81  AVBufferRef *child_frames_ref;
82  mfxFrameSurface1 *surfaces_internal;
83  mfxHDLPair *handle_pairs_internal;
84  int nb_surfaces_used;
85 
86  // used in the frame allocator for non-opaque surfaces
87  mfxMemId *mem_ids;
88  // used in the opaque alloc request for opaque surfaces
89  mfxFrameSurface1 **surface_ptrs;
90 
91  mfxExtOpaqueSurfaceAlloc opaque_alloc;
92  mfxExtBuffer *ext_buffers[1];
93 } QSVFramesContext;
94 
95 static const struct {
96  enum AVPixelFormat pix_fmt;
97  uint32_t fourcc;
98 } supported_pixel_formats[] = {
99  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
100  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
101  { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
102  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
103 #if CONFIG_VAAPI
104  { AV_PIX_FMT_YUYV422,
105  MFX_FOURCC_YUY2 },
106 #if QSV_VERSION_ATLEAST(1, 27)
107  { AV_PIX_FMT_Y210,
108  MFX_FOURCC_Y210 },
109 #endif
110 #endif
111 };
112 
113 static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
114 {
115  int i;
116  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
117  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
118  return supported_pixel_formats[i].fourcc;
119  }
120  return 0;
121 }
122 
123 #if CONFIG_D3D11VA
124 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
125 {
126  uint32_t bind_flags = 0;
127 
128  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
129  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
130  else
131  bind_flags = D3D11_BIND_DECODER;
132 
133  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
134  bind_flags = D3D11_BIND_RENDER_TARGET;
135 
136  return bind_flags;
137 }
138 #endif
139 
140 static int qsv_device_init(AVHWDeviceContext *ctx)
141 {
142  AVQSVDeviceContext *hwctx = ctx->hwctx;
143  QSVDeviceContext *s = ctx->internal->priv;
144  int hw_handle_supported = 0;
145  mfxHandleType handle_type;
146  enum AVHWDeviceType device_type;
147  enum AVPixelFormat pix_fmt;
148  mfxStatus err;
149 
150  err = MFXQueryIMPL(hwctx->session, &s->impl);
151  if (err == MFX_ERR_NONE)
152  err = MFXQueryVersion(hwctx->session, &s->ver);
153  if (err != MFX_ERR_NONE) {
154  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
155  return AVERROR_UNKNOWN;
156  }
157 
158  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
159 #if CONFIG_VAAPI
160  handle_type = MFX_HANDLE_VA_DISPLAY;
161  device_type = AV_HWDEVICE_TYPE_VAAPI;
162  pix_fmt = AV_PIX_FMT_VAAPI;
163  hw_handle_supported = 1;
164 #endif
165  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
166 #if CONFIG_D3D11VA
167  handle_type = MFX_HANDLE_D3D11_DEVICE;
168  device_type = AV_HWDEVICE_TYPE_D3D11VA;
169  pix_fmt = AV_PIX_FMT_D3D11;
170  hw_handle_supported = 1;
171 #endif
172  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
173 #if CONFIG_DXVA2
174  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
175  device_type = AV_HWDEVICE_TYPE_DXVA2;
176  pix_fmt = AV_PIX_FMT_DXVA2_VLD;
177  hw_handle_supported = 1;
178 #endif
179  }
180 
181  if (hw_handle_supported) {
182  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
183  if (err == MFX_ERR_NONE) {
184  s->handle_type = handle_type;
185  s->child_device_type = device_type;
186  s->child_pix_fmt = pix_fmt;
187  }
188  }
189  if (!s->handle) {
190  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
191  "from the session\n");
192  }
193  return 0;
194 }
195 
196 static void qsv_frames_uninit(AVHWFramesContext *ctx)
197 {
198  QSVFramesContext *s = ctx->internal->priv;
199 
200  if (s->session_download) {
201  MFXVideoVPP_Close(s->session_download);
202  MFXClose(s->session_download);
203  }
204  s->session_download = NULL;
205  s->session_download_init = 0;
206 
207  if (s->session_upload) {
208  MFXVideoVPP_Close(s->session_upload);
209  MFXClose(s->session_upload);
210  }
211  s->session_upload = NULL;
212  s->session_upload_init = 0;
213 
214 #if HAVE_PTHREADS
215  pthread_mutex_destroy(&s->session_lock);
216  pthread_cond_destroy(&s->session_cond);
217 #endif
218 
219  av_freep(&s->mem_ids);
220  av_freep(&s->surface_ptrs);
221  av_freep(&s->surfaces_internal);
222  av_freep(&s->handle_pairs_internal);
223  av_buffer_unref(&s->child_frames_ref);
224 }
225 
226 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
227 {
228 }
229 
230 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
231 {
232  AVHWFramesContext *ctx = opaque;
233  QSVFramesContext *s = ctx->internal->priv;
234  AVQSVFramesContext *hwctx = ctx->hwctx;
235 
236  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
237  s->nb_surfaces_used++;
238  av_buffer_create((uint8_t*)(s->handle_pairs_internal + s->nb_surfaces_used - 1),
239  sizeof(*s->handle_pairs_internal), qsv_pool_release_dummy, NULL, 0);
240  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
241  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
242  }
243 
244  return NULL;
245 }
246 
247 static int qsv_init_child_ctx(AVHWFramesContext *ctx)
248 {
249  AVQSVFramesContext *hwctx = ctx->hwctx;
250  QSVFramesContext *s = ctx->internal->priv;
251  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
252 
253  AVBufferRef *child_device_ref = NULL;
254  AVBufferRef *child_frames_ref = NULL;
255 
256  AVHWDeviceContext *child_device_ctx;
257  AVHWFramesContext *child_frames_ctx;
258 
259  int i, ret = 0;
260 
261  if (!device_priv->handle) {
262  av_log(ctx, AV_LOG_ERROR,
263  "Cannot create a non-opaque internal surface pool without "
264  "a hardware handle\n");
265  return AVERROR(EINVAL);
266  }
267 
268  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
269  if (!child_device_ref)
270  return AVERROR(ENOMEM);
271  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
272 
273 #if CONFIG_VAAPI
274  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
275  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
276  child_device_hwctx->display = (VADisplay)device_priv->handle;
277  }
278 #endif
279 #if CONFIG_D3D11VA
280  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
281  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
282  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
283  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
284  }
285 #endif
286 #if CONFIG_DXVA2
287  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
288  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
289  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
290  }
291 #endif
292 
293  ret = av_hwdevice_ctx_init(child_device_ref);
294  if (ret < 0) {
295  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
296  goto fail;
297  }
298 
299  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
300  if (!child_frames_ref) {
301  ret = AVERROR(ENOMEM);
302  goto fail;
303  }
304  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
305 
306  child_frames_ctx->format = device_priv->child_pix_fmt;
307  child_frames_ctx->sw_format = ctx->sw_format;
308  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
309  child_frames_ctx->width = FFALIGN(ctx->width, 16);
310  child_frames_ctx->height = FFALIGN(ctx->height, 16);
311 
312 #if CONFIG_D3D11VA
313  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
314  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
315  if (hwctx->frame_type == 0)
316  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
317  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
318  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
319  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
320  }
321 #endif
322 #if CONFIG_DXVA2
323  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
324  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
325  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
326  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
327  else
328  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
329  }
330 #endif
331 
332  ret = av_hwframe_ctx_init(child_frames_ref);
333  if (ret < 0) {
334  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
335  goto fail;
336  }
337 
338 #if CONFIG_VAAPI
339  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
340  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
341  for (i = 0; i < ctx->initial_pool_size; i++) {
342  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
343  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
344  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
345  }
346  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
347  }
348 #endif
349 #if CONFIG_D3D11VA
350  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
351  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
352  for (i = 0; i < ctx->initial_pool_size; i++) {
353  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
354  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
355  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
356  } else {
357  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
358  }
359  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
360  }
361  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
362  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
363  } else {
364  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
365  }
366  }
367 #endif
368 #if CONFIG_DXVA2
369  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
370  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
371  for (i = 0; i < ctx->initial_pool_size; i++) {
372  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
373  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
374  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
375  }
376  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
377  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
378  else
379  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
380  }
381 #endif
382 
383  s->child_frames_ref = child_frames_ref;
384  child_frames_ref = NULL;
385 
386 fail:
387  av_buffer_unref(&child_device_ref);
388  av_buffer_unref(&child_frames_ref);
389  return ret;
390 }
391 
392 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
393 {
394  const AVPixFmtDescriptor *desc;
395  uint32_t fourcc;
396 
397  desc = av_pix_fmt_desc_get(ctx->sw_format);
398  if (!desc)
399  return AVERROR(EINVAL);
400 
401  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
402  if (!fourcc)
403  return AVERROR(EINVAL);
404 
405  surf->Info.BitDepthLuma = desc->comp[0].depth;
406  surf->Info.BitDepthChroma = desc->comp[0].depth;
407  surf->Info.Shift = desc->comp[0].depth > 8;
408 
409  if (desc->log2_chroma_w && desc->log2_chroma_h)
410  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
411  else if (desc->log2_chroma_w)
412  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
413  else
414  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
415 
416  surf->Info.FourCC = fourcc;
417  surf->Info.Width = FFALIGN(ctx->width, 16);
418  surf->Info.CropW = ctx->width;
419  surf->Info.Height = FFALIGN(ctx->height, 16);
420  surf->Info.CropH = ctx->height;
421  surf->Info.FrameRateExtN = 25;
422  surf->Info.FrameRateExtD = 1;
423  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
424 
425  return 0;
426 }
427 
428 static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
429 {
430  QSVFramesContext *s = ctx->internal->priv;
431  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
432 
433  int i, ret = 0;
434 
435  if (ctx->initial_pool_size <= 0) {
436  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
437  return AVERROR(EINVAL);
438  }
439 
440  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
441  sizeof(*s->handle_pairs_internal));
442  if (!s->handle_pairs_internal)
443  return AVERROR(ENOMEM);
444 
445  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
446  sizeof(*s->surfaces_internal));
447  if (!s->surfaces_internal)
448  return AVERROR(ENOMEM);
449 
450  for (i = 0; i < ctx->initial_pool_size; i++) {
451  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
452  if (ret < 0)
453  return ret;
454  }
455 
456  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
457  ret = qsv_init_child_ctx(ctx);
458  if (ret < 0)
459  return ret;
460  }
461 
462  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
463  ctx, qsv_pool_alloc, NULL);
464  if (!ctx->internal->pool_internal)
465  return AVERROR(ENOMEM);
466 
467  frames_hwctx->surfaces = s->surfaces_internal;
468  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
469 
470  return 0;
471 }
472 
473 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
474  mfxFrameAllocResponse *resp)
475 {
476  AVHWFramesContext *ctx = pthis;
477  QSVFramesContext *s = ctx->internal->priv;
478  AVQSVFramesContext *hwctx = ctx->hwctx;
479  mfxFrameInfo *i = &req->Info;
480  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
481 
482  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
483  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
484  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
485  return MFX_ERR_UNSUPPORTED;
486  if (i->Width > i1->Width || i->Height > i1->Height ||
487  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
488  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
489  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
490  i->Width, i->Height, i->FourCC, i->ChromaFormat,
491  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
492  return MFX_ERR_UNSUPPORTED;
493  }
494 
495  resp->mids = s->mem_ids;
496  resp->NumFrameActual = hwctx->nb_surfaces;
497 
498  return MFX_ERR_NONE;
499 }
500 
501 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
502 {
503  return MFX_ERR_NONE;
504 }
505 
506 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
507 {
508  return MFX_ERR_UNSUPPORTED;
509 }
510 
511 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
512 {
513  return MFX_ERR_UNSUPPORTED;
514 }
515 
516 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
517 {
518  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
519  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
520 
521  pair_dst->first = pair_src->first;
522 
523  if (pair_src->second != (mfxMemId)MFX_INFINITE)
524  pair_dst->second = pair_src->second;
525  return MFX_ERR_NONE;
526 }
527 
528 static int qsv_init_internal_session(AVHWFramesContext *ctx,
529  mfxSession *session, int upload)
530 {
531  QSVFramesContext *s = ctx->internal->priv;
532  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
533  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
534  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
535 
536  mfxFrameAllocator frame_allocator = {
537  .pthis = ctx,
538  .Alloc = frame_alloc,
539  .Lock = frame_lock,
540  .Unlock = frame_unlock,
541  .GetHDL = frame_get_hdl,
542  .Free = frame_free,
543  };
544 
545  mfxVideoParam par;
546  mfxStatus err;
547 
548  err = MFXInit(device_priv->impl, &device_priv->ver, session);
549  if (err != MFX_ERR_NONE) {
550  av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
551  return AVERROR_UNKNOWN;
552  }
553 
554  if (device_priv->handle) {
555  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
556  device_priv->handle);
557  if (err != MFX_ERR_NONE)
558  return AVERROR_UNKNOWN;
559  }
560 
561  if (!opaque) {
562  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
563  if (err != MFX_ERR_NONE)
564  return AVERROR_UNKNOWN;
565  }
566 
567  memset(&par, 0, sizeof(par));
568 
569  if (opaque) {
570  par.ExtParam = s->ext_buffers;
571  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
572  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
573  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
574  } else {
575  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
576  MFX_IOPATTERN_IN_VIDEO_MEMORY;
577  }
578 
579  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
580  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
581  par.AsyncDepth = 1;
582 
583  par.vpp.In = frames_hwctx->surfaces[0].Info;
584 
585  /* Apparently VPP requires the frame rate to be set to some value, otherwise
586  * init will fail (probably for the framerate conversion filter). Since we
587  * are only doing data upload/download here, we just invent an arbitrary
588  * value */
589  par.vpp.In.FrameRateExtN = 25;
590  par.vpp.In.FrameRateExtD = 1;
591  par.vpp.Out = par.vpp.In;
592 
593  err = MFXVideoVPP_Init(*session, &par);
594  if (err != MFX_ERR_NONE) {
595  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
596  "Surface upload/download will not be possible\n");
597  MFXClose(*session);
598  *session = NULL;
599  }
600 
601  return 0;
602 }
603 
604 static int qsv_frames_init(AVHWFramesContext *ctx)
605 {
606  QSVFramesContext *s = ctx->internal->priv;
607  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
608 
609  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
610 
611  uint32_t fourcc;
612  int i, ret;
613 
614  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
615  if (!fourcc) {
616  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
617  return AVERROR(ENOSYS);
618  }
619 
620  if (!ctx->pool) {
621  ret = qsv_init_pool(ctx, fourcc);
622  if (ret < 0) {
623  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
624  return ret;
625  }
626  }
627 
628  if (opaque) {
629  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
630  sizeof(*s->surface_ptrs));
631  if (!s->surface_ptrs)
632  return AVERROR(ENOMEM);
633 
634  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
635  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
636 
637  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
638  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
639  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
640 
641  s->opaque_alloc.Out = s->opaque_alloc.In;
642 
643  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
644  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
645 
646  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
647  } else {
648  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
649  if (!s->mem_ids)
650  return AVERROR(ENOMEM);
651 
652  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
653  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
654  }
655 
656  s->session_download = NULL;
657  s->session_upload = NULL;
658 
659  s->session_download_init = 0;
660  s->session_upload_init = 0;
661 
662 #if HAVE_PTHREADS
663  pthread_mutex_init(&s->session_lock, NULL);
664  pthread_cond_init(&s->session_cond, NULL);
665 #endif
666 
667  return 0;
668 }
669 
670 static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
671 {
672  frame->buf[0] = av_buffer_pool_get(ctx->pool);
673  if (!frame->buf[0])
674  return AVERROR(ENOMEM);
675 
676  frame->data[3] = frame->buf[0]->data;
677  frame->format = AV_PIX_FMT_QSV;
678  frame->width = ctx->width;
679  frame->height = ctx->height;
680 
681  return 0;
682 }
683 
684 static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
685  enum AVHWFrameTransferDirection dir,
686  enum AVPixelFormat **formats)
687 {
688  enum AVPixelFormat *fmts;
689 
690  fmts = av_malloc_array(2, sizeof(*fmts));
691  if (!fmts)
692  return AVERROR(ENOMEM);
693 
694  fmts[0] = ctx->sw_format;
695  fmts[1] = AV_PIX_FMT_NONE;
696 
697  *formats = fmts;
698 
699  return 0;
700 }
701 
702 static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
703  AVHWFramesContext *src_ctx, int flags)
704 {
705  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
706  int i;
707 
708  switch (dst_ctx->device_ctx->type) {
709 #if CONFIG_VAAPI
710  case AV_HWDEVICE_TYPE_VAAPI:
711  {
712  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
713  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
714  sizeof(*dst_hwctx->surface_ids));
715  if (!dst_hwctx->surface_ids)
716  return AVERROR(ENOMEM);
717  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
718  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
719  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
720  }
721  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
722  }
723  break;
724 #endif
725 #if CONFIG_D3D11VA
726  case AV_HWDEVICE_TYPE_D3D11VA:
727  {
728  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
729  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
730  dst_hwctx->texture = (ID3D11Texture2D*)pair->first;
731  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
732  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
733  dst_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(src_hwctx->frame_type);
734  }
735  break;
736 #endif
737 #if CONFIG_DXVA2
738  case AV_HWDEVICE_TYPE_DXVA2:
739  {
740  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
741  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
742  sizeof(*dst_hwctx->surfaces));
743  if (!dst_hwctx->surfaces)
744  return AVERROR(ENOMEM);
745  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
746  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
747  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
748  }
749  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
750  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
751  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
752  else
753  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
754  }
755  break;
756 #endif
757  default:
758  return AVERROR(ENOSYS);
759  }
760 
761  return 0;
762 }
763 
764 static int qsv_map_from(AVHWFramesContext *ctx,
765  AVFrame *dst, const AVFrame *src, int flags)
766 {
767  QSVFramesContext *s = ctx->internal->priv;
768  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
769  AVHWFramesContext *child_frames_ctx;
770  const AVPixFmtDescriptor *desc;
771  uint8_t *child_data;
772  AVFrame *dummy;
773  int ret = 0;
774 
775  if (!s->child_frames_ref)
776  return AVERROR(ENOSYS);
777  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
778 
779  switch (child_frames_ctx->device_ctx->type) {
780 #if CONFIG_VAAPI
781  case AV_HWDEVICE_TYPE_VAAPI:
782  {
783  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
784  /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
785  * we need this casting for vaapi.
786  * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
787  * to avoid compile warning */
788  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
789  break;
790  }
791 #endif
792 #if CONFIG_D3D11VA
793  case AV_HWDEVICE_TYPE_D3D11VA:
794  {
795  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
796  child_data = pair->first;
797  break;
798  }
799 #endif
800 #if CONFIG_DXVA2
801  case AV_HWDEVICE_TYPE_DXVA2:
802  {
803  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
804  child_data = pair->first;
805  break;
806  }
807 #endif
808  default:
809  return AVERROR(ENOSYS);
810  }
811 
812  if (dst->format == child_frames_ctx->format) {
813  ret = ff_hwframe_map_create(s->child_frames_ref,
814  dst, src, NULL, NULL);
815  if (ret < 0)
816  return ret;
817 
818  dst->width = src->width;
819  dst->height = src->height;
820 
821  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
822  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
823  dst->data[0] = pair->first;
824  dst->data[1] = pair->second;
825  } else {
826  dst->data[3] = child_data;
827  }
828 
829  return 0;
830  }
831 
832  desc = av_pix_fmt_desc_get(dst->format);
833  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
834  // This only supports mapping to software.
835  return AVERROR(ENOSYS);
836  }
837 
838  dummy = av_frame_alloc();
839  if (!dummy)
840  return AVERROR(ENOMEM);
841 
842  dummy->buf[0] = av_buffer_ref(src->buf[0]);
843  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
844  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
845  goto fail;
846 
847  dummy->format = child_frames_ctx->format;
848  dummy->width = src->width;
849  dummy->height = src->height;
850 
851  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
852  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
853  dummy->data[0] = pair->first;
854  dummy->data[1] = pair->second;
855  } else {
856  dummy->data[3] = child_data;
857  }
858 
859  ret = av_hwframe_map(dst, dummy, flags);
860 
861 fail:
862  av_frame_free(&dummy);
863 
864  return ret;
865 }
866 
867 static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
868  const AVFrame *src)
869 {
870  QSVFramesContext *s = ctx->internal->priv;
871  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
872  int download = !!src->hw_frames_ctx;
873  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
874 
875  AVFrame *dummy;
876  int ret;
877 
878  dummy = av_frame_alloc();
879  if (!dummy)
880  return AVERROR(ENOMEM);
881 
882  dummy->format = child_frames_ctx->format;
883  dummy->width = src->width;
884  dummy->height = src->height;
885  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
886  dummy->data[3] = surf->Data.MemId;
887  dummy->hw_frames_ctx = s->child_frames_ref;
888 
889  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
890  av_hwframe_transfer_data(dummy, src, 0);
891 
892  dummy->buf[0] = NULL;
893  dummy->data[3] = NULL;
894  dummy->hw_frames_ctx = NULL;
895 
896  av_frame_free(&dummy);
897 
898  return ret;
899 }
900 
901 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
902 {
903  switch (frame->format) {
904  case AV_PIX_FMT_NV12:
905  case AV_PIX_FMT_P010:
906  surface->Data.Y = frame->data[0];
907  surface->Data.UV = frame->data[1];
908  break;
909 
910  case AV_PIX_FMT_YUV420P:
911  surface->Data.Y = frame->data[0];
912  surface->Data.U = frame->data[1];
913  surface->Data.V = frame->data[2];
914  break;
915 
916  case AV_PIX_FMT_BGRA:
917  surface->Data.B = frame->data[0];
918  surface->Data.G = frame->data[0] + 1;
919  surface->Data.R = frame->data[0] + 2;
920  surface->Data.A = frame->data[0] + 3;
921  break;
922 #if CONFIG_VAAPI
923  case AV_PIX_FMT_YUYV422:
924  surface->Data.Y = frame->data[0];
925  surface->Data.U = frame->data[0] + 1;
926  surface->Data.V = frame->data[0] + 3;
927  break;
928 
929  case AV_PIX_FMT_Y210:
930  surface->Data.Y16 = (mfxU16 *)frame->data[0];
931  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
932  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
933  break;
934 #endif
935  default:
936  return MFX_ERR_UNSUPPORTED;
937  }
938  surface->Data.Pitch = frame->linesize[0];
939  surface->Data.TimeStamp = frame->pts;
940 
941  return 0;
942 }
943 
944 static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
945  const AVFrame *src)
946 {
947  QSVFramesContext *s = ctx->internal->priv;
948  mfxFrameSurface1 out = {{ 0 }};
949  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
950 
951  mfxSyncPoint sync = NULL;
952  mfxStatus err;
953  int ret = 0;
954 
955  while (!s->session_download_init && !s->session_download && !ret) {
956 #if HAVE_PTHREADS
957  if (pthread_mutex_trylock(&s->session_lock) == 0) {
958 #endif
959  if (!s->session_download_init) {
960  ret = qsv_init_internal_session(ctx, &s->session_download, 0);
961  if (s->session_download)
962  s->session_download_init = 1;
963  }
964 #if HAVE_PTHREADS
965  pthread_mutex_unlock(&s->session_lock);
966  pthread_cond_signal(&s->session_cond);
967  } else {
968  pthread_mutex_lock(&s->session_lock);
969  while (!s->session_download_init && !s->session_download) {
970  pthread_cond_wait(&s->session_cond, &s->session_lock);
971  }
972  pthread_mutex_unlock(&s->session_lock);
973  }
974 #endif
975  }
976 
977  if (ret < 0)
978  return ret;
979 
980  if (!s->session_download) {
981  if (s->child_frames_ref)
982  return qsv_transfer_data_child(ctx, dst, src);
983 
984  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
985  return AVERROR(ENOSYS);
986  }
987 
988  out.Info = in->Info;
989  map_frame_to_surface(dst, &out);
990 
991  do {
992  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
993  if (err == MFX_WRN_DEVICE_BUSY)
994  av_usleep(1);
995  } while (err == MFX_WRN_DEVICE_BUSY);
996 
997  if (err < 0 || !sync) {
998  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
999  return AVERROR_UNKNOWN;
1000  }
1001 
1002  do {
1003  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1004  } while (err == MFX_WRN_IN_EXECUTION);
1005  if (err < 0) {
1006  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1007  return AVERROR_UNKNOWN;
1008  }
1009 
1010  return 0;
1011 }
1012 
1013 static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
1014  const AVFrame *src)
1015 {
1016  QSVFramesContext *s = ctx->internal->priv;
1017  mfxFrameSurface1 in = {{ 0 }};
1018  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1019 
1020  mfxSyncPoint sync = NULL;
1021  mfxStatus err;
1022  int ret = 0;
1023  /* make a copy if the input is not padded as libmfx requires */
1024  AVFrame tmp_frame;
1025  const AVFrame *src_frame;
1026  int realigned = 0;
1027 
1028 
1029  while (!s->session_upload_init && !s->session_upload && !ret) {
1030 #if HAVE_PTHREADS
1031  if (pthread_mutex_trylock(&s->session_lock) == 0) {
1032 #endif
1033  if (!s->session_upload_init) {
1034  ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
1035  if (s->session_upload)
1036  s->session_upload_init = 1;
1037  }
1038 #if HAVE_PTHREADS
1039  pthread_mutex_unlock(&s->session_lock);
1040  pthread_cond_signal(&s->session_cond);
1041  } else {
1042  pthread_mutex_lock(&s->session_lock);
1043  while (!s->session_upload_init && !s->session_upload) {
1044  pthread_cond_wait(&s->session_cond, &s->session_lock);
1045  }
1046  pthread_mutex_unlock(&s->session_lock);
1047  }
1048 #endif
1049  }
1050  if (ret < 0)
1051  return ret;
1052 
1053  if (src->height & 15 || src->linesize[0] & 15) {
1054  realigned = 1;
1055  memset(&tmp_frame, 0, sizeof(tmp_frame));
1056  tmp_frame.format = src->format;
1057  tmp_frame.width = FFALIGN(src->width, 16);
1058  tmp_frame.height = FFALIGN(src->height, 16);
1059  ret = av_frame_get_buffer(&tmp_frame, 0);
1060  if (ret < 0)
1061  return ret;
1062 
1063  ret = av_frame_copy(&tmp_frame, src);
1064  if (ret < 0) {
1065  av_frame_unref(&tmp_frame);
1066  return ret;
1067  }
1068  }
1069 
1070  src_frame = realigned ? &tmp_frame : src;
1071 
1072  if (!s->session_upload) {
1073  if (s->child_frames_ref)
1074  return qsv_transfer_data_child(ctx, dst, src_frame);
1075 
1076  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1077  return AVERROR(ENOSYS);
1078  }
1079 
1080  in.Info = out->Info;
1081  map_frame_to_surface(src_frame, &in);
1082 
1083  do {
1084  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1085  if (err == MFX_WRN_DEVICE_BUSY)
1086  av_usleep(1);
1087  } while (err == MFX_WRN_DEVICE_BUSY);
1088 
1089  if (err < 0 || !sync) {
1090  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1091  return AVERROR_UNKNOWN;
1092  }
1093 
1094  do {
1095  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1096  } while (err == MFX_WRN_IN_EXECUTION);
1097  if (err < 0) {
1098  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1099  return AVERROR_UNKNOWN;
1100  }
1101 
1102  if (realigned)
1103  av_frame_unref(&tmp_frame);
1104 
1105  return 0;
1106 }
1107 
1108 static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
1109  AVHWFramesContext *src_ctx, int flags)
1110 {
1111  QSVFramesContext *s = dst_ctx->internal->priv;
1112  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1113  int i;
1114 
1115  if (src_ctx->initial_pool_size == 0) {
1116  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1117  "mapped to QSV frames.\n");
1118  return AVERROR(EINVAL);
1119  }
1120 
1121  switch (src_ctx->device_ctx->type) {
1122 #if CONFIG_VAAPI
1123  case AV_HWDEVICE_TYPE_VAAPI:
1124  {
1125  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1126  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1127  sizeof(*s->handle_pairs_internal));
1128  if (!s->handle_pairs_internal)
1129  return AVERROR(ENOMEM);
1130  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1131  sizeof(*s->surfaces_internal));
1132  if (!s->surfaces_internal)
1133  return AVERROR(ENOMEM);
1134  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1135  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1136  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1137  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1138  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1139  }
1140  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1141  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1142  }
1143  break;
1144 #endif
1145 #if CONFIG_D3D11VA
1146  case AV_HWDEVICE_TYPE_D3D11VA:
1147  {
1148  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1149  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1150  sizeof(*s->handle_pairs_internal));
1151  if (!s->handle_pairs_internal)
1152  return AVERROR(ENOMEM);
1153  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1154  sizeof(*s->surfaces_internal));
1155  if (!s->surfaces_internal)
1156  return AVERROR(ENOMEM);
1157  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1158  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1159  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1160  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1161  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1162  } else {
1163  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1164  }
1165  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1166  }
1167  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1168  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1169  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1170  } else {
1171  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1172  }
1173  }
1174  break;
1175 #endif
1176 #if CONFIG_DXVA2
1177  case AV_HWDEVICE_TYPE_DXVA2:
1178  {
1179  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1180  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1181  sizeof(*s->handle_pairs_internal));
1182  if (!s->handle_pairs_internal)
1183  return AVERROR(ENOMEM);
1184  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1185  sizeof(*s->surfaces_internal));
1186  if (!s->surfaces_internal)
1187  return AVERROR(ENOMEM);
1188  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1189  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1190  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1191  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1192  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1193  }
1194  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1195  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1196  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1197  else
1198  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1199  }
1200  break;
1201 #endif
1202  default:
1203  return AVERROR(ENOSYS);
1204  }
1205 
1206  dst_hwctx->surfaces = s->surfaces_internal;
1207 
1208  return 0;
1209 }
1210 
1211 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1212  AVFrame *dst, const AVFrame *src, int flags)
1213 {
1214  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1215  int i, err, index = -1;
1216 
1217  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1218  switch(src->format) {
1219 #if CONFIG_VAAPI
1220  case AV_PIX_FMT_VAAPI:
1221  {
1222  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1223  if (pair->first == src->data[3]) {
1224  index = i;
1225  break;
1226  }
1227  }
1228 #endif
1229 #if CONFIG_D3D11VA
1230  case AV_PIX_FMT_D3D11:
1231  {
1232  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1233  if (pair->first == src->data[0]
1234  && pair->second == src->data[1]) {
1235  index = i;
1236  break;
1237  }
1238  }
1239 #endif
1240 #if CONFIG_DXVA2
1241  case AV_PIX_FMT_DXVA2_VLD:
1242  {
1243  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1244  if (pair->first == src->data[3]) {
1245  index = i;
1246  break;
1247  }
1248  }
1249 #endif
1250  }
1251  }
1252  if (index < 0) {
1253  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1254  "is not in the mapped frames context.\n");
1255  return AVERROR(EINVAL);
1256  }
1257 
1258  err = ff_hwframe_map_create(dst->hw_frames_ctx,
1259  dst, src, NULL, NULL);
1260  if (err)
1261  return err;
1262 
1263  dst->width = src->width;
1264  dst->height = src->height;
1265  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1266 
1267  return 0;
1268 }
1269 
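/* A minimal sketch (not part of this file) of how the map_to path above is
 * typically reached through the public API: a QSV frames context is derived
 * from an existing child frames context (VAAPI in this example) and a child
 * frame is then mapped into it. The names qsv_device_ref, vaapi_frames_ref
 * and vaapi_frame are illustrative; error handling is omitted.
 *
 *     AVBufferRef *qsv_frames_ref = NULL;
 *     av_hwframe_ctx_create_derived(&qsv_frames_ref, AV_PIX_FMT_QSV,
 *                                   qsv_device_ref, vaapi_frames_ref, 0);
 *
 *     AVFrame *qsv_frame = av_frame_alloc();
 *     qsv_frame->format        = AV_PIX_FMT_QSV;
 *     qsv_frame->hw_frames_ctx = av_buffer_ref(qsv_frames_ref);
 *     av_hwframe_map(qsv_frame, vaapi_frame, AV_HWFRAME_MAP_DIRECT);
 */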
1270 static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
1271  const void *hwconfig,
1272  AVHWFramesConstraints *constraints)
1273 {
1274  int i;
1275 
1276  constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
1277  sizeof(*constraints->valid_sw_formats));
1278  if (!constraints->valid_sw_formats)
1279  return AVERROR(ENOMEM);
1280 
1281  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1282  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1283  constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
1284 
1285  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1286  if (!constraints->valid_hw_formats)
1287  return AVERROR(ENOMEM);
1288 
1289  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1290  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1291 
1292  return 0;
1293 }
1294 
1295 static void qsv_device_free(AVHWDeviceContext *ctx)
1296 {
1297  AVQSVDeviceContext *hwctx = ctx->hwctx;
1298  QSVDevicePriv *priv = ctx->user_opaque;
1299 
1300  if (hwctx->session)
1301  MFXClose(hwctx->session);
1302 
1303  av_buffer_unref(&priv->child_device_ctx);
1304  av_freep(&priv);
1305 }
1306 
1307 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1308 {
1309  static const struct {
1310  const char *name;
1311  mfxIMPL impl;
1312  } impl_map[] = {
1313  { "auto", MFX_IMPL_AUTO },
1314  { "sw", MFX_IMPL_SOFTWARE },
1315  { "hw", MFX_IMPL_HARDWARE },
1316  { "auto_any", MFX_IMPL_AUTO_ANY },
1317  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1318  { "hw2", MFX_IMPL_HARDWARE2 },
1319  { "hw3", MFX_IMPL_HARDWARE3 },
1320  { "hw4", MFX_IMPL_HARDWARE4 },
1321  };
1322 
1323  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1324  int i;
1325 
1326  if (device) {
1327  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1328  if (!strcmp(device, impl_map[i].name)) {
1329  impl = impl_map[i].impl;
1330  break;
1331  }
1332  if (i == FF_ARRAY_ELEMS(impl_map))
1333  impl = strtol(device, NULL, 0);
1334  }
1335 
1336  if (impl != MFX_IMPL_SOFTWARE) {
1337  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
1338  impl |= MFX_IMPL_VIA_D3D11;
1339  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
1340  impl |= MFX_IMPL_VIA_D3D9;
1341  }
1342 
1343  return impl;
1344 }
1345 
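/* A minimal usage sketch (illustrative, not part of this file): the "device"
 * string passed to av_hwdevice_ctx_create() for AV_HWDEVICE_TYPE_QSV is fed
 * to choose_implementation() above, so a name such as "hw", "hw2" or
 * "auto_any", or a raw numeric mfxIMPL value, selects the libmfx
 * implementation. Error handling is omitted.
 *
 *     AVBufferRef *device_ref = NULL;
 *     int err = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_QSV,
 *                                      "hw_any", NULL, 0);
 */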
1346 static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
1347  mfxIMPL implementation,
1348  AVHWDeviceContext *child_device_ctx,
1349  int flags)
1350 {
1351  AVQSVDeviceContext *hwctx = ctx->hwctx;
1352 
1353  mfxVersion ver = { { 3, 1 } };
1354  mfxHDL handle;
1355  mfxHandleType handle_type;
1356  mfxStatus err;
1357  int ret;
1358 
1359  switch (child_device_ctx->type) {
1360 #if CONFIG_VAAPI
1361  case AV_HWDEVICE_TYPE_VAAPI:
1362  {
1363  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1364  handle_type = MFX_HANDLE_VA_DISPLAY;
1365  handle = (mfxHDL)child_device_hwctx->display;
1366  }
1367  break;
1368 #endif
1369 #if CONFIG_D3D11VA
1370  case AV_HWDEVICE_TYPE_D3D11VA:
1371  {
1372  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1373  handle_type = MFX_HANDLE_D3D11_DEVICE;
1374  handle = (mfxHDL)child_device_hwctx->device;
1375  }
1376  break;
1377 #endif
1378 #if CONFIG_DXVA2
1379  case AV_HWDEVICE_TYPE_DXVA2:
1380  {
1381  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1382  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1383  handle = (mfxHDL)child_device_hwctx->devmgr;
1384  }
1385  break;
1386 #endif
1387  default:
1388  ret = AVERROR(ENOSYS);
1389  goto fail;
1390  }
1391 
1392  err = MFXInit(implementation, &ver, &hwctx->session);
1393  if (err != MFX_ERR_NONE) {
1394  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1395  "%d.\n", err);
1396  ret = AVERROR_UNKNOWN;
1397  goto fail;
1398  }
1399 
1400  err = MFXQueryVersion(hwctx->session, &ver);
1401  if (err != MFX_ERR_NONE) {
1402  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1403  ret = AVERROR_UNKNOWN;
1404  goto fail;
1405  }
1406 
1407  av_log(ctx, AV_LOG_VERBOSE,
1408  "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1409  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1410 
1411  MFXClose(hwctx->session);
1412 
1413  err = MFXInit(implementation, &ver, &hwctx->session);
1414  if (err != MFX_ERR_NONE) {
1415  av_log(ctx, AV_LOG_ERROR,
1416  "Error initializing an MFX session: %d.\n", err);
1417  ret = AVERROR_UNKNOWN;
1418  goto fail;
1419  }
1420 
1421  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1422  if (err != MFX_ERR_NONE) {
1423  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1424  "%d\n", err);
1425  ret = AVERROR_UNKNOWN;
1426  goto fail;
1427  }
1428 
1429  return 0;
1430 
1431 fail:
1432  if (hwctx->session)
1433  MFXClose(hwctx->session);
1434  return ret;
1435 }
1436 
1437 static int qsv_device_derive(AVHWDeviceContext *ctx,
1438  AVHWDeviceContext *child_device_ctx,
1439  AVDictionary *opts, int flags)
1440 {
1441  mfxIMPL impl;
1442  impl = choose_implementation("hw_any", child_device_ctx->type);
1443  return qsv_device_derive_from_child(ctx, impl,
1444  child_device_ctx, flags);
1445 }
1446 
1447 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1448  AVDictionary *opts, int flags)
1449 {
1450  QSVDevicePriv *priv;
1451  enum AVHWDeviceType child_device_type;
1452  AVHWDeviceContext *child_device;
1453  AVDictionary *child_device_opts;
1454  AVDictionaryEntry *e;
1455 
1456  mfxIMPL impl;
1457  int ret;
1458 
1459  priv = av_mallocz(sizeof(*priv));
1460  if (!priv)
1461  return AVERROR(ENOMEM);
1462 
1463  ctx->user_opaque = priv;
1464  ctx->free = qsv_device_free;
1465 
1466  e = av_dict_get(opts, "child_device_type", NULL, 0);
1467  if (e) {
1468  child_device_type = av_hwdevice_find_type_by_name(e ? e->value : NULL);
1469  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
1470  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
1471  "\"%s\".\n", e ? e->value : NULL);
1472  return AVERROR(EINVAL);
1473  }
1474  } else if (CONFIG_VAAPI) {
1475  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1476  } else if (CONFIG_DXVA2) {
1477  av_log(ctx, AV_LOG_WARNING,
1478  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
1479  "with old commandlines. This behaviour will be removed "
1480  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
1481  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1482  } else if (CONFIG_D3D11VA) {
1483  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
1484  } else {
1485  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1486  return AVERROR(ENOSYS);
1487  }
1488 
1489  child_device_opts = NULL;
1490  switch (child_device_type) {
1491 #if CONFIG_VAAPI
1492  case AV_HWDEVICE_TYPE_VAAPI:
1493  {
1494  // libmfx does not actually implement VAAPI properly, rather it
1495  // depends on the specific behaviour of a matching iHD driver when
1496  // used on recent Intel hardware. Set options to the VAAPI device
1497  // creation so that we should pick a usable setup by default if
1498  // possible, even when multiple devices and drivers are available.
1499  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
1500  av_dict_set(&child_device_opts, "driver", "iHD", 0);
1501  }
1502  break;
1503 #endif
1504 #if CONFIG_D3D11VA
1505  case AV_HWDEVICE_TYPE_D3D11VA:
1506  break;
1507 #endif
1508 #if CONFIG_DXVA2
1509  case AV_HWDEVICE_TYPE_DXVA2:
1510  break;
1511 #endif
1512  default:
1513  {
1514  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1515  return AVERROR(ENOSYS);
1516  }
1517  break;
1518  }
1519 
1520  e = av_dict_get(opts, "child_device", NULL, 0);
1521  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1522  e ? e->value : NULL, child_device_opts, 0);
1523 
1524  av_dict_free(&child_device_opts);
1525  if (ret < 0)
1526  return ret;
1527 
1528  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1529 
1530  impl = choose_implementation(device, child_device_type);
1531 
1532  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1533 }
1534 
1535 const HWContextType ff_hwcontext_type_qsv = {
1536  .type = AV_HWDEVICE_TYPE_QSV,
1537  .name = "QSV",
1538 
1539  .device_hwctx_size = sizeof(AVQSVDeviceContext),
1540  .device_priv_size = sizeof(QSVDeviceContext),
1541  .frames_hwctx_size = sizeof(AVQSVFramesContext),
1542  .frames_priv_size = sizeof(QSVFramesContext),
1543 
1544  .device_create = qsv_device_create,
1545  .device_derive = qsv_device_derive,
1546  .device_init = qsv_device_init,
1547  .frames_get_constraints = qsv_frames_get_constraints,
1548  .frames_init = qsv_frames_init,
1549  .frames_uninit = qsv_frames_uninit,
1550  .frames_get_buffer = qsv_get_buffer,
1551  .transfer_get_formats = qsv_transfer_get_formats,
1552  .transfer_data_to = qsv_transfer_data_to,
1553  .transfer_data_from = qsv_transfer_data_from,
1554  .map_to = qsv_map_to,
1555  .map_from = qsv_map_from,
1556  .frames_derive_to = qsv_frames_derive_to,
1557  .frames_derive_from = qsv_frames_derive_from,
1558 
1559  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1560 };
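
The callbacks above are not called directly by applications; they are reached through the generic libavutil hwcontext API. The following is a minimal sketch (assumptions: NV12 software input, the "hw_any" implementation string, a pool of 32 surfaces; error paths simplified) of uploading a software frame to a QSV surface, which exercises qsv_device_create(), qsv_frames_init(), qsv_get_buffer() and qsv_transfer_data_to():

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixfmt.h>

static int upload_to_qsv(const AVFrame *sw_frame, AVFrame *hw_frame)
{
    AVBufferRef *device_ref = NULL, *frames_ref = NULL;
    AVHWFramesContext *frames_ctx;
    int err;

    /* runs qsv_device_create() / qsv_device_init() behind the scenes */
    err = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_QSV,
                                 "hw_any", NULL, 0);
    if (err < 0)
        return err;

    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref) {
        err = AVERROR(ENOMEM);
        goto done;
    }
    frames_ctx = (AVHWFramesContext*)frames_ref->data;
    frames_ctx->format            = AV_PIX_FMT_QSV;
    frames_ctx->sw_format         = AV_PIX_FMT_NV12;
    frames_ctx->width             = sw_frame->width;
    frames_ctx->height            = sw_frame->height;
    frames_ctx->initial_pool_size = 32;   /* QSV requires a fixed pool size */

    /* runs qsv_frames_init() (and qsv_init_pool() for the internal pool) */
    err = av_hwframe_ctx_init(frames_ref);
    if (err < 0)
        goto done;

    /* runs qsv_get_buffer(), then qsv_transfer_data_to() */
    err = av_hwframe_get_buffer(frames_ref, hw_frame, 0);
    if (err >= 0)
        err = av_hwframe_transfer_data(hw_frame, sw_frame, 0);

done:
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&device_ref);
    return err;
}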
formats
formats
Definition: signature.h:48
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
qsv_transfer_data_child
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:867
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:246
QSVFramesContext::child_frames_ref
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:81
qsv_transfer_data_to
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1013
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2564
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
qsv_map_from
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:764
qsv_fourcc_from_pix_fmt
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:113
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:112
QSVDeviceContext::ver
mfxVersion ver
Definition: hwcontext_qsv.c:64
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:333
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:303
pixdesc.h
index
fg index
Definition: ffmpeg_filter.c:168
AVFrame::width
int width
Definition: frame.h:361
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:247
qsv_device_derive
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1437
AVDXVA2FramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_dxva2.h:46
qsv_frames_derive_from
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:702
AV_HWDEVICE_TYPE_NONE
@ AV_HWDEVICE_TYPE_NONE
Definition: hwcontext.h:28
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:789
qsv_init_surface
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
Definition: hwcontext_qsv.c:392
data
const char data[16]
Definition: mxf.c:143
choose_implementation
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
Definition: hwcontext_qsv.c:1307
QSVDeviceContext
Definition: hwcontext_qsv.c:61
av_hwdevice_find_type_by_name
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
Look up an AVHWDeviceType by name.
Definition: hwcontext.c:82
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
AVHWFramesContext::internal
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
AVDictionary
Definition: dict.c:30
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:737
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
fourcc
uint32_t fourcc
Definition: hwcontext_qsv.c:97
av_hwdevice_ctx_init
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:200
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:477
QSVDeviceContext::handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:63
qsv_transfer_data_from
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:944
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:317
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
QSVDevicePriv
Definition: hwcontext_qsv.c:57
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
AVVAAPIFramesContext::surface_ids
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
Definition: hwcontext_vaapi.h:101
AVHWFramesInternal::priv
void * priv
Definition: hwcontext_internal.h:116
AVD3D11FrameDescriptor::texture
ID3D11Texture2D * texture
The texture in which the frame is located.
Definition: hwcontext_d3d11va.h:117
QSVDeviceContext::child_device_type
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:67
qsv_init_child_ctx
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:247
fail
#define fail()
Definition: checkasm.h:127
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
qsv_frames_get_constraints
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_qsv.c:1270
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
qsv_frames_derive_to
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1108
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:501
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:443
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
av_hwdevice_ctx_alloc
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:142
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
QSVFramesContext::ext_buffers
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:92
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:473
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:387
QSVDevicePriv::child_device_ctx
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:58
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AVHWDeviceType
AVHWDeviceType
Definition: hwcontext.h:27
QSVDeviceContext::handle
mfxHDL handle
Definition: hwcontext_qsv.c:62
QSVFramesContext::mem_ids
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:87
AVDXVA2FramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_dxva2.h:59
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
ff_hwcontext_type_qsv
const HWContextType ff_hwcontext_type_qsv
Definition: hwcontext_qsv.c:1535
AVD3D11VAFramesContext::texture_infos
AVD3D11FrameDescriptor * texture_infos
If the texture structure member above is not NULL, contains the same texture pointer for all eleme...
Definition: hwcontext_d3d11va.h:175
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
qsv_frames_uninit
static void qsv_frames_uninit(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:196
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: hwcontext_qsv.c:55
qsv_frames_init
static int qsv_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:604
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:212
map_frame_to_surface
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: hwcontext_qsv.c:901
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:511
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:68
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
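For memory that is owned elsewhere, a no-op free callback can be passed so that unreferencing the AVBufferRef never releases the underlying array; a minimal sketch (the function names are illustrative):

#include <libavutil/buffer.h>

static void no_op_free(void *opaque, uint8_t *data)
{
    /* intentionally empty: the wrapped memory is owned by someone else */
}

/* Wrap an externally owned array in a refcounted AVBufferRef. */
static AVBufferRef *wrap_external(uint8_t *data, size_t size)
{
    return av_buffer_create(data, size, no_op_free, NULL, 0);
}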
qsv_init_internal_session
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
Definition: hwcontext_qsv.c:528
hwcontext_dxva2.h
QSVFramesContext::opaque_alloc
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:91
QSVFramesContext::session_upload_init
int session_upload_init
Definition: hwcontext_qsv.c:75
qsv_get_buffer
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_qsv.c:670
AVDXVA2FramesContext::surface_type
DWORD surface_type
The surface type (e.g.
Definition: hwcontext_dxva2.h:51
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:678
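av_frame_copy() copies only the data, not the frame properties, and requires dst to already have matching geometry and allocated buffers; a minimal sketch of cloning a software frame under those assumptions:

#include <libavutil/frame.h>

static AVFrame *clone_sw_frame(const AVFrame *src)
{
    AVFrame *dst = av_frame_alloc();
    if (!dst)
        return NULL;
    dst->format = src->format;
    dst->width  = src->width;
    dst->height = src->height;
    if (av_frame_get_buffer(dst, 0) < 0 ||   /* allocate matching buffers */
        av_frame_copy(dst, src) < 0) {       /* copy the actual pixel data */
        av_frame_free(&dst);
        return NULL;
    }
    return dst;
}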
QSVFramesContext::session_download_init
int session_download_init
Definition: hwcontext_qsv.c:73
QSVFramesContext::nb_surfaces_used
int nb_surfaces_used
Definition: hwcontext_qsv.c:84
qsv_device_free
static void qsv_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:1295
AVFrame::format
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:376
qsv_transfer_get_formats
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_qsv.c:684
buffer.h
AVD3D11VAFramesContext::texture
ID3D11Texture2D * texture
The canonical texture used for pool allocation.
Definition: hwcontext_d3d11va.h:152
qsv_device_derive_from_child
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
Definition: hwcontext_qsv.c:1346
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:303
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: hwcontext_qsv.c:516
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
pthread_cond_destroy
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
Definition: os2threads.h:144
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
hwcontext_qsv.h
qsv_device_init
static int qsv_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:140
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:32
common.h
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
QSVFramesContext::handle_pairs_internal
mfxHDLPair * handle_pairs_internal
Definition: hwcontext_qsv.c:83
AVD3D11FrameDescriptor::index
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
Definition: hwcontext_d3d11va.h:125
QSVFramesContext::surface_ptrs
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:89
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:437
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:263
QSVFramesContext::session_download
mfxSession session_download
Definition: hwcontext_qsv.c:72
AVDXVA2FramesContext::surfaces
IDirect3DSurface9 ** surfaces
The surface pool.
Definition: hwcontext_dxva2.h:58
pthread_cond_t
Definition: os2threads.h:58
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:271
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:415
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
hwcontext_vaapi.h
qsv_map_to
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1211
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:96
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:610
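For this file the relevant call is creating a QSV device; a minimal sketch (the NULL device string and flags of 0 are just the simplest defaults):

#include <libavutil/hwcontext.h>

static int open_qsv_device(AVBufferRef **device_ref)
{
    /* FFmpeg opens a suitable child device (VAAPI/DXVA2/D3D11VA) internally
     * and derives the MFX session from it. */
    return av_hwdevice_ctx_create(device_ref, AV_HWDEVICE_TYPE_QSV, NULL, NULL, 0);
}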
QSVDeviceContext::impl
mfxIMPL impl
Definition: hwcontext_qsv.c:65
av_hwframe_transfer_data
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:443
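A sketch of the common download direction, copying a hardware frame (such as an AV_PIX_FMT_QSV surface) into a newly allocated software frame:

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

static int download_hw_frame(AVFrame **out, const AVFrame *hw_frame)
{
    AVFrame *sw = av_frame_alloc();
    int ret;

    if (!sw)
        return AVERROR(ENOMEM);
    /* leaving sw->format unset lets FFmpeg pick a supported software format */
    ret = av_hwframe_transfer_data(sw, hw_frame, 0);
    if (ret < 0) {
        av_frame_free(&sw);
        return ret;
    }
    *out = sw;
    return 0;
}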
pthread_cond_signal
static av_always_inline int pthread_cond_signal(pthread_cond_t *cond)
Definition: os2threads.h:152
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:506
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:607
AV_HWDEVICE_TYPE_QSV
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
qsv_pool_release_dummy
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:226
AVFrame::height
int height
Definition: frame.h:361
QSVDeviceContext::child_pix_fmt
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:68
AVVAAPIFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_vaapi.h:102
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
supported_pixel_formats
static const struct @303 supported_pixel_formats[]
QSVFramesContext::session_upload
mfxSession session_upload
Definition: hwcontext_qsv.c:74
qsv_device_create
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1447
pthread_cond_wait
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
Definition: os2threads.h:192
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:440
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
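QSV frame pools are fixed-size, so initial_pool_size has to be set before av_hwframe_ctx_init(); a rough sketch (the resolution and pool size of 32 are arbitrary example values):

#include <libavutil/error.h>
#include <libavutil/hwcontext.h>

static int alloc_qsv_frames(AVBufferRef *device_ref, AVBufferRef **frames_ref)
{
    AVBufferRef *ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *fc;
    int ret;

    if (!ref)
        return AVERROR(ENOMEM);
    fc = (AVHWFramesContext *)ref->data;
    fc->format            = AV_PIX_FMT_QSV;  /* hardware pixel format */
    fc->sw_format         = AV_PIX_FMT_NV12; /* layout of the underlying surfaces */
    fc->width             = 1920;
    fc->height            = 1080;
    fc->initial_pool_size = 32;              /* number of surfaces preallocated */

    ret = av_hwframe_ctx_init(ref);
    if (ret < 0) {
        av_buffer_unref(&ref);
        return ret;
    }
    *frames_ref = ref;
    return 0;
}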
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
hwcontext_internal.h
AVVAAPIFramesContext
VAAPI-specific data associated with a frame pool.
Definition: hwcontext_vaapi.h:88
QSVFramesContext::surfaces_internal
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:82
AVDictionaryEntry
Definition: dict.h:79
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVFramesContext
Definition: qsv_internal.h:93
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
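A small sketch of the usual option-dictionary pattern around device creation; the "child_device" key and the render node path are illustrative example values:

#include <libavutil/dict.h>
#include <libavutil/hwcontext.h>

static int open_qsv_with_child_device(AVBufferRef **device_ref)
{
    AVDictionary *opts = NULL;
    int ret;

    av_dict_set(&opts, "child_device", "/dev/dri/renderD128", 0);
    ret = av_hwdevice_ctx_create(device_ref, AV_HWDEVICE_TYPE_QSV, NULL, opts, 0);
    av_dict_free(&opts); /* the dictionary is not consumed by av_hwdevice_ctx_create() */
    return ret;
}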
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
HWContextType
Definition: hwcontext_internal.h:29
qsv_pool_alloc
static AVBufferRef * qsv_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:230
pthread_cond_init
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
Definition: os2threads.h:133
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
AVDictionaryEntry::value
char * value
Definition: dict.h:81
hwcontext_d3d11va.h
qsv_init_pool
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
Definition: hwcontext_qsv.c:428
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:64