FFmpeg
hwcontext_qsv.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdatomic.h>
20 #include <stdint.h>
21 #include <string.h>
22 
23 #include <mfx/mfxvideo.h>
24 
25 #include "config.h"
26 
27 #if HAVE_PTHREADS
28 #include <pthread.h>
29 #endif
30 
31 #define COBJMACROS
32 #if CONFIG_VAAPI
33 #include "hwcontext_vaapi.h"
34 #endif
35 #if CONFIG_D3D11VA
36 #include "hwcontext_d3d11va.h"
37 #endif
38 #if CONFIG_DXVA2
39 #include "hwcontext_dxva2.h"
40 #endif
41 
42 #include "buffer.h"
43 #include "common.h"
44 #include "hwcontext.h"
45 #include "hwcontext_internal.h"
46 #include "hwcontext_qsv.h"
47 #include "mem.h"
48 #include "pixfmt.h"
49 #include "pixdesc.h"
50 #include "time.h"
51 #include "imgutils.h"
52 
53 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
54  (MFX_VERSION_MAJOR > (MAJOR) || \
55  MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
56 
57 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
58 
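/*
 * Illustrative only (editor's note, not from the original source): QSV_VERSION_ATLEAST
 * is the usual compile-time guard against the libmfx API headers, e.g.
 *
 *     #if QSV_VERSION_ATLEAST(1, 27)
 *     // code that needs features introduced in API 1.27
 *     #endif
 */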
59 typedef struct QSVDevicePriv {
60  AVBufferRef *child_device_ctx;
61 } QSVDevicePriv;
62 
63 typedef struct QSVDeviceContext {
64  mfxHDL handle;
65  mfxHandleType handle_type;
66  mfxVersion ver;
67  mfxIMPL impl;
68 
69  enum AVHWDeviceType child_device_type;
70  enum AVPixelFormat child_pix_fmt;
71 } QSVDeviceContext;
72 
73 typedef struct QSVFramesContext {
74  mfxSession session_download;
75  atomic_int session_download_init;
76  mfxSession session_upload;
77  atomic_int session_upload_init;
78 #if HAVE_PTHREADS
79  pthread_mutex_t session_lock;
80 #endif
81 
82  AVBufferRef *child_frames_ref;
83  mfxFrameSurface1 *surfaces_internal;
84  mfxHDLPair *handle_pairs_internal;
85  int nb_surfaces_used;
86 
87  // used in the frame allocator for non-opaque surfaces
88  mfxMemId *mem_ids;
89  // used in the opaque alloc request for opaque surfaces
90  mfxFrameSurface1 **surface_ptrs;
91 
92  mfxExtOpaqueSurfaceAlloc opaque_alloc;
93  mfxExtBuffer *ext_buffers[1];
94  AVFrame realigned_upload_frame;
95  AVFrame realigned_download_frame;
96 } QSVFramesContext;
97 
98 static const struct {
99  enum AVPixelFormat pix_fmt;
100  uint32_t fourcc;
101 } supported_pixel_formats[] = {
102  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
103  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
104  { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
105  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
106 #if CONFIG_VAAPI
107  { AV_PIX_FMT_YUYV422,
108  MFX_FOURCC_YUY2 },
109  { AV_PIX_FMT_Y210,
110  MFX_FOURCC_Y210 },
111 #endif
112 };
113 
114 extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
115  enum AVHWDeviceType base_dev_type,
116  void **base_handle);
117 
118 /**
119  * The caller needs to allocate enough space for the base_handle pointer(s).
120  **/
121 int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
122  enum AVHWDeviceType base_dev_type,
123  void **base_handle)
124 {
125  mfxHDLPair *handle_pair;
126  handle_pair = surf->Data.MemId;
127  switch (base_dev_type) {
128 #if CONFIG_VAAPI
129  case AV_HWDEVICE_TYPE_VAAPI:
130  base_handle[0] = handle_pair->first;
131  return 0;
132 #endif
133 #if CONFIG_D3D11VA
134  case AV_HWDEVICE_TYPE_D3D11VA:
135  base_handle[0] = handle_pair->first;
136  base_handle[1] = handle_pair->second;
137  return 0;
138 #endif
139 #if CONFIG_DXVA2
140  case AV_HWDEVICE_TYPE_DXVA2:
141  base_handle[0] = handle_pair->first;
142  return 0;
143 #endif
144  }
145  return AVERROR(EINVAL);
146 }
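/*
 * Illustrative usage (editor's sketch, not part of the original file): the
 * caller provides storage for up to two handles, since the D3D11VA case above
 * fills both the texture pointer and the subresource index.
 *
 *     void *base[2] = { NULL, NULL };
 *     if (ff_qsv_get_surface_base_handle(surf, AV_HWDEVICE_TYPE_D3D11VA, base) == 0) {
 *         ID3D11Texture2D *texture = base[0];
 *         intptr_t index = (intptr_t)base[1];
 *     }
 */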
147 
148 static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
149 {
150  int i;
151  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
152  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
153  return supported_pixel_formats[i].fourcc;
154  }
155  return 0;
156 }
157 
158 #if CONFIG_D3D11VA
159 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
160 {
161  uint32_t bind_flags = 0;
162 
163  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
164  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
165  else
166  bind_flags = D3D11_BIND_DECODER;
167 
168  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
169  bind_flags = D3D11_BIND_RENDER_TARGET;
170 
171  return bind_flags;
172 }
173 #endif
174 
175 static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
176 {
177  const AVPixFmtDescriptor *desc;
178  int i, planes_nb = 0;
179  if (dst->format != src->format)
180  return AVERROR(EINVAL);
181 
182  desc = av_pix_fmt_desc_get(dst->format);
183 
184  for (i = 0; i < desc->nb_components; i++)
185  planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);
186 
187  for (i = 0; i < planes_nb; i++) {
188  int sheight, dheight, y;
189  ptrdiff_t swidth = av_image_get_linesize(src->format,
190  src->width,
191  i);
192  ptrdiff_t dwidth = av_image_get_linesize(dst->format,
193  dst->width,
194  i);
195  const AVComponentDescriptor comp = desc->comp[i];
196  if (swidth < 0 || dwidth < 0) {
197  av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
198  return AVERROR(EINVAL);
199  }
200  sheight = src->height;
201  dheight = dst->height;
202  if (i) {
203  sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
204  dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
205  }
206  //fill right padding
207  for (y = 0; y < sheight; y++) {
208  void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
209  av_memcpy_backptr(line_ptr,
210  comp.depth > 8 ? 2 : 1,
211  dwidth - swidth);
212  }
213  //fill bottom padding
214  for (y = sheight; y < dheight; y++) {
215  memcpy(dst->data[i]+y*dst->linesize[i],
216  dst->data[i]+(sheight-1)*dst->linesize[i],
217  dwidth);
218  }
219  }
220  return 0;
221 }
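/*
 * Worked example (editor's note): copying a 1920x1080 NV12 frame into a
 * 16-aligned 1920x1088 temporary, qsv_fill_border() replicates the last valid
 * pixel of each row into any right padding and then duplicates the last valid
 * row downwards, so luma rows 1080..1087 (and the corresponding chroma rows)
 * repeat the last real row instead of containing uninitialized data.
 */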
222 
223 static int qsv_device_init(AVHWDeviceContext *ctx)
224 {
225  AVQSVDeviceContext *hwctx = ctx->hwctx;
226  QSVDeviceContext *s = ctx->internal->priv;
227  int hw_handle_supported = 0;
228  mfxHandleType handle_type;
229  enum AVHWDeviceType device_type;
230  enum AVPixelFormat pix_fmt;
231  mfxStatus err;
232 
233  err = MFXQueryIMPL(hwctx->session, &s->impl);
234  if (err == MFX_ERR_NONE)
235  err = MFXQueryVersion(hwctx->session, &s->ver);
236  if (err != MFX_ERR_NONE) {
237  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
238  return AVERROR_UNKNOWN;
239  }
240 
241  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
242 #if CONFIG_VAAPI
243  handle_type = MFX_HANDLE_VA_DISPLAY;
244  device_type = AV_HWDEVICE_TYPE_VAAPI;
245  pix_fmt = AV_PIX_FMT_VAAPI;
246  hw_handle_supported = 1;
247 #endif
248  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
249 #if CONFIG_D3D11VA
250  handle_type = MFX_HANDLE_D3D11_DEVICE;
251  device_type = AV_HWDEVICE_TYPE_D3D11VA;
252  pix_fmt = AV_PIX_FMT_D3D11;
253  hw_handle_supported = 1;
254 #endif
255  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
256 #if CONFIG_DXVA2
257  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
258  device_type = AV_HWDEVICE_TYPE_DXVA2;
259  pix_fmt = AV_PIX_FMT_DXVA2_VLD;
260  hw_handle_supported = 1;
261 #endif
262  }
263 
264  if (hw_handle_supported) {
265  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
266  if (err == MFX_ERR_NONE) {
267  s->handle_type = handle_type;
268  s->child_device_type = device_type;
269  s->child_pix_fmt = pix_fmt;
270  }
271  }
272  if (!s->handle) {
273  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
274  "from the session\n");
275  }
276  return 0;
277 }
278 
279 static void qsv_frames_uninit(AVHWFramesContext *ctx)
280 {
281  QSVFramesContext *s = ctx->internal->priv;
282 
283  if (s->session_download) {
284  MFXVideoVPP_Close(s->session_download);
285  MFXClose(s->session_download);
286  }
287  s->session_download = NULL;
288  s->session_download_init = 0;
289 
290  if (s->session_upload) {
291  MFXVideoVPP_Close(s->session_upload);
292  MFXClose(s->session_upload);
293  }
294  s->session_upload = NULL;
295  s->session_upload_init = 0;
296 
297 #if HAVE_PTHREADS
298  pthread_mutex_destroy(&s->session_lock);
299 #endif
300 
301  av_freep(&s->mem_ids);
302  av_freep(&s->surface_ptrs);
303  av_freep(&s->surfaces_internal);
304  av_freep(&s->handle_pairs_internal);
305  av_frame_unref(&s->realigned_upload_frame);
306  av_frame_unref(&s->realigned_download_frame);
307  av_buffer_unref(&s->child_frames_ref);
308 }
309 
310 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
311 {
312 }
313 
314 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
315 {
316  AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
317  QSVFramesContext *s = ctx->internal->priv;
318  AVQSVFramesContext *hwctx = ctx->hwctx;
319 
320  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
321  s->nb_surfaces_used++;
322  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
323  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
324  }
325 
326  return NULL;
327 }
328 
329 static int qsv_init_child_ctx(AVHWFramesContext *ctx)
330 {
331  AVQSVFramesContext *hwctx = ctx->hwctx;
332  QSVFramesContext *s = ctx->internal->priv;
333  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
334 
335  AVBufferRef *child_device_ref = NULL;
336  AVBufferRef *child_frames_ref = NULL;
337 
338  AVHWDeviceContext *child_device_ctx;
339  AVHWFramesContext *child_frames_ctx;
340 
341  int i, ret = 0;
342 
343  if (!device_priv->handle) {
345  "Cannot create a non-opaque internal surface pool without "
346  "a hardware handle\n");
347  return AVERROR(EINVAL);
348  }
349 
350  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
351  if (!child_device_ref)
352  return AVERROR(ENOMEM);
353  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
354 
355 #if CONFIG_VAAPI
356  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
357  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
358  child_device_hwctx->display = (VADisplay)device_priv->handle;
359  }
360 #endif
361 #if CONFIG_D3D11VA
362  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
363  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
364  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
365  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
366  }
367 #endif
368 #if CONFIG_DXVA2
369  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
370  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
371  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
372  }
373 #endif
374 
375  ret = av_hwdevice_ctx_init(child_device_ref);
376  if (ret < 0) {
377  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
378  goto fail;
379  }
380 
381  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
382  if (!child_frames_ref) {
383  ret = AVERROR(ENOMEM);
384  goto fail;
385  }
386  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
387 
388  child_frames_ctx->format = device_priv->child_pix_fmt;
389  child_frames_ctx->sw_format = ctx->sw_format;
390  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
391  child_frames_ctx->width = FFALIGN(ctx->width, 16);
392  child_frames_ctx->height = FFALIGN(ctx->height, 16);
393 
394 #if CONFIG_D3D11VA
395  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
396  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
397  if (hwctx->frame_type == 0)
398  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
399  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
400  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
401  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
402  }
403 #endif
404 #if CONFIG_DXVA2
405  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
406  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
407  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
408  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
409  else
410  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
411  }
412 #endif
413 
414  ret = av_hwframe_ctx_init(child_frames_ref);
415  if (ret < 0) {
416  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
417  goto fail;
418  }
419 
420 #if CONFIG_VAAPI
421  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
422  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
423  for (i = 0; i < ctx->initial_pool_size; i++) {
424  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
425  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
426  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
427  }
428  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
429  }
430 #endif
431 #if CONFIG_D3D11VA
432  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
433  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
434  for (i = 0; i < ctx->initial_pool_size; i++) {
435  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
436  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
437  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
438  } else {
439  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
440  }
441  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
442  }
443  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
444  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
445  } else {
446  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
447  }
448  }
449 #endif
450 #if CONFIG_DXVA2
451  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
452  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
453  for (i = 0; i < ctx->initial_pool_size; i++) {
454  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
455  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
456  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
457  }
458  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
459  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
460  else
461  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
462  }
463 #endif
464 
465  s->child_frames_ref = child_frames_ref;
466  child_frames_ref = NULL;
467 
468 fail:
469  av_buffer_unref(&child_device_ref);
470  av_buffer_unref(&child_frames_ref);
471  return ret;
472 }
473 
474 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
475 {
476  const AVPixFmtDescriptor *desc;
477  uint32_t fourcc;
478 
479  desc = av_pix_fmt_desc_get(ctx->sw_format);
480  if (!desc)
481  return AVERROR(EINVAL);
482 
483  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
484  if (!fourcc)
485  return AVERROR(EINVAL);
486 
487  surf->Info.BitDepthLuma = desc->comp[0].depth;
488  surf->Info.BitDepthChroma = desc->comp[0].depth;
489  surf->Info.Shift = desc->comp[0].depth > 8;
490 
491  if (desc->log2_chroma_w && desc->log2_chroma_h)
492  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
493  else if (desc->log2_chroma_w)
494  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
495  else
496  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
497 
498  surf->Info.FourCC = fourcc;
499  surf->Info.Width = FFALIGN(ctx->width, 16);
500  surf->Info.CropW = ctx->width;
501  surf->Info.Height = FFALIGN(ctx->height, 16);
502  surf->Info.CropH = ctx->height;
503  surf->Info.FrameRateExtN = 25;
504  surf->Info.FrameRateExtD = 1;
505  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
506 
507  return 0;
508 }
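/*
 * Example (editor's note): for AV_PIX_FMT_NV12 the descriptor has
 * log2_chroma_w == 1 and log2_chroma_h == 1, so the code above picks
 * MFX_CHROMAFORMAT_YUV420; AV_PIX_FMT_YUYV422 (log2_chroma_w == 1,
 * log2_chroma_h == 0) maps to MFX_CHROMAFORMAT_YUV422, and formats without
 * chroma subsampling such as AV_PIX_FMT_BGRA fall through to
 * MFX_CHROMAFORMAT_YUV444.
 */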
509 
510 static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
511 {
512  QSVFramesContext *s = ctx->internal->priv;
513  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
514 
515  int i, ret = 0;
516 
517  if (ctx->initial_pool_size <= 0) {
518  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
519  return AVERROR(EINVAL);
520  }
521 
522  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
523  sizeof(*s->handle_pairs_internal));
524  if (!s->handle_pairs_internal)
525  return AVERROR(ENOMEM);
526 
527  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
528  sizeof(*s->surfaces_internal));
529  if (!s->surfaces_internal)
530  return AVERROR(ENOMEM);
531 
532  for (i = 0; i < ctx->initial_pool_size; i++) {
533  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
534  if (ret < 0)
535  return ret;
536  }
537 
538  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
539  ret = qsv_init_child_ctx(ctx);
540  if (ret < 0)
541  return ret;
542  }
543 
544  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
545  ctx, qsv_pool_alloc, NULL);
546  if (!ctx->internal->pool_internal)
547  return AVERROR(ENOMEM);
548 
549  frames_hwctx->surfaces = s->surfaces_internal;
550  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
551 
552  return 0;
553 }
554 
555 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
556  mfxFrameAllocResponse *resp)
557 {
558  AVHWFramesContext *ctx = pthis;
559  QSVFramesContext *s = ctx->internal->priv;
560  AVQSVFramesContext *hwctx = ctx->hwctx;
561  mfxFrameInfo *i = &req->Info;
562  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
563 
564  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
565  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
566  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
567  return MFX_ERR_UNSUPPORTED;
568  if (i->Width > i1->Width || i->Height > i1->Height ||
569  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
570  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
571  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
572  i->Width, i->Height, i->FourCC, i->ChromaFormat,
573  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
574  return MFX_ERR_UNSUPPORTED;
575  }
576 
577  resp->mids = s->mem_ids;
578  resp->NumFrameActual = hwctx->nb_surfaces;
579 
580  return MFX_ERR_NONE;
581 }
582 
583 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
584 {
585  return MFX_ERR_NONE;
586 }
587 
588 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
589 {
590  return MFX_ERR_UNSUPPORTED;
591 }
592 
593 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
594 {
595  return MFX_ERR_UNSUPPORTED;
596 }
597 
598 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
599 {
600  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
601  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
602 
603  pair_dst->first = pair_src->first;
604 
605  if (pair_src->second != (mfxMemId)MFX_INFINITE)
606  pair_dst->second = pair_src->second;
607  return MFX_ERR_NONE;
608 }
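/*
 * Editor's summary of the convention used throughout this file: Data.MemId of
 * every surface points to an mfxHDLPair. 'first' carries the child-API handle
 * (a VASurfaceID*, ID3D11Texture2D* or IDirect3DSurface9*), while 'second'
 * carries the D3D11 texture-array index, or MFX_INFINITE when no index
 * applies; that is why frame_get_hdl() copies 'second' only when it differs
 * from MFX_INFINITE.
 */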
609 
610 static int qsv_init_internal_session(AVHWFramesContext *ctx,
611  mfxSession *session, int upload)
612 {
613  QSVFramesContext *s = ctx->internal->priv;
614  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
615  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
616  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
617 
618  mfxFrameAllocator frame_allocator = {
619  .pthis = ctx,
620  .Alloc = frame_alloc,
621  .Lock = frame_lock,
622  .Unlock = frame_unlock,
623  .GetHDL = frame_get_hdl,
624  .Free = frame_free,
625  };
626 
627  mfxVideoParam par;
628  mfxStatus err;
629 
630  err = MFXInit(device_priv->impl, &device_priv->ver, session);
631  if (err != MFX_ERR_NONE) {
632  av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
633  return AVERROR_UNKNOWN;
634  }
635 
636  if (device_priv->handle) {
637  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
638  device_priv->handle);
639  if (err != MFX_ERR_NONE)
640  return AVERROR_UNKNOWN;
641  }
642 
643  if (!opaque) {
644  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
645  if (err != MFX_ERR_NONE)
646  return AVERROR_UNKNOWN;
647  }
648 
649  memset(&par, 0, sizeof(par));
650 
651  if (opaque) {
652  par.ExtParam = s->ext_buffers;
653  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
654  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
655  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
656  } else {
657  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
658  MFX_IOPATTERN_IN_VIDEO_MEMORY;
659  }
660 
661  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
662  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
663  par.AsyncDepth = 1;
664 
665  par.vpp.In = frames_hwctx->surfaces[0].Info;
666 
667  /* Apparently VPP requires the frame rate to be set to some value, otherwise
668  * init will fail (probably for the framerate conversion filter). Since we
669  * are only doing data upload/download here, we just invent an arbitrary
670  * value */
671  par.vpp.In.FrameRateExtN = 25;
672  par.vpp.In.FrameRateExtD = 1;
673  par.vpp.Out = par.vpp.In;
674 
675  err = MFXVideoVPP_Init(*session, &par);
676  if (err != MFX_ERR_NONE) {
677  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session. "
678  "Surface upload/download will not be possible\n");
679  MFXClose(*session);
680  *session = NULL;
681  }
682 
683  return 0;
684 }
685 
686 static int qsv_frames_init(AVHWFramesContext *ctx)
687 {
688  QSVFramesContext *s = ctx->internal->priv;
689  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
690 
691  int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
692 
693  uint32_t fourcc;
694  int i, ret;
695 
696  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
697  if (!fourcc) {
698  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
699  return AVERROR(ENOSYS);
700  }
701 
702  if (!ctx->pool) {
703  ret = qsv_init_pool(ctx, fourcc);
704  if (ret < 0) {
705  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
706  return ret;
707  }
708  }
709 
710  if (opaque) {
711  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
712  sizeof(*s->surface_ptrs));
713  if (!s->surface_ptrs)
714  return AVERROR(ENOMEM);
715 
716  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
717  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
718 
719  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
720  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
721  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
722 
723  s->opaque_alloc.Out = s->opaque_alloc.In;
724 
725  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
726  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
727 
728  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
729  } else {
730  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
731  if (!s->mem_ids)
732  return AVERROR(ENOMEM);
733 
734  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
735  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
736  }
737 
738  s->session_download = NULL;
739  s->session_upload = NULL;
740 
741  s->session_download_init = 0;
742  s->session_upload_init = 0;
743 
744 #if HAVE_PTHREADS
745  pthread_mutex_init(&s->session_lock, NULL);
746 #endif
747 
748  return 0;
749 }
750 
751 static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
752 {
753  frame->buf[0] = av_buffer_pool_get(ctx->pool);
754  if (!frame->buf[0])
755  return AVERROR(ENOMEM);
756 
757  frame->data[3] = frame->buf[0]->data;
758  frame->format = AV_PIX_FMT_QSV;
759  frame->width = ctx->width;
760  frame->height = ctx->height;
761 
762  return 0;
763 }
764 
765 static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
766  enum AVHWFrameTransferDirection dir,
767  enum AVPixelFormat **formats)
768 {
769  enum AVPixelFormat *fmts;
770 
771  fmts = av_malloc_array(2, sizeof(*fmts));
772  if (!fmts)
773  return AVERROR(ENOMEM);
774 
775  fmts[0] = ctx->sw_format;
776  fmts[1] = AV_PIX_FMT_NONE;
777 
778  *formats = fmts;
779 
780  return 0;
781 }
782 
783 static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
784  AVHWFramesContext *src_ctx, int flags)
785 {
786  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
787  int i;
788 
789  switch (dst_ctx->device_ctx->type) {
790 #if CONFIG_VAAPI
791  case AV_HWDEVICE_TYPE_VAAPI:
792  {
793  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
794  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
795  sizeof(*dst_hwctx->surface_ids));
796  if (!dst_hwctx->surface_ids)
797  return AVERROR(ENOMEM);
798  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
799  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
800  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
801  }
802  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
803  }
804  break;
805 #endif
806 #if CONFIG_D3D11VA
807  case AV_HWDEVICE_TYPE_D3D11VA:
808  {
809  D3D11_TEXTURE2D_DESC texDesc;
810  dst_ctx->initial_pool_size = src_ctx->initial_pool_size;
811  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
812  dst_hwctx->texture_infos = av_calloc(src_hwctx->nb_surfaces,
813  sizeof(*dst_hwctx->texture_infos));
814  if (!dst_hwctx->texture_infos)
815  return AVERROR(ENOMEM);
816  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
817  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
818  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
819  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
820  dst_hwctx->texture_infos[i].texture = (ID3D11Texture2D*)pair->first;
821  dst_hwctx->texture_infos[i].index = pair->second == (mfxMemId)MFX_INFINITE ? (intptr_t)0 : (intptr_t)pair->second;
822  }
823  ID3D11Texture2D_GetDesc(dst_hwctx->texture_infos[0].texture, &texDesc);
824  dst_hwctx->BindFlags = texDesc.BindFlags;
825  }
826  break;
827 #endif
828 #if CONFIG_DXVA2
829  case AV_HWDEVICE_TYPE_DXVA2:
830  {
831  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
832  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
833  sizeof(*dst_hwctx->surfaces));
834  if (!dst_hwctx->surfaces)
835  return AVERROR(ENOMEM);
836  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
837  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
838  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
839  }
840  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
841  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
842  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
843  else
844  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
845  }
846  break;
847 #endif
848  default:
849  return AVERROR(ENOSYS);
850  }
851 
852  return 0;
853 }
854 
855 static int qsv_map_from(AVHWFramesContext *ctx,
856  AVFrame *dst, const AVFrame *src, int flags)
857 {
858  QSVFramesContext *s = ctx->internal->priv;
859  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
860  AVHWFramesContext *child_frames_ctx;
861  const AVPixFmtDescriptor *desc;
862  uint8_t *child_data;
863  AVFrame *dummy;
864  int ret = 0;
865 
866  if (!s->child_frames_ref)
867  return AVERROR(ENOSYS);
868  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
869 
870  switch (child_frames_ctx->device_ctx->type) {
871 #if CONFIG_VAAPI
872  case AV_HWDEVICE_TYPE_VAAPI:
873  {
874  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
875  /* pair->first is a VASurfaceID* while data[3] in a VAAPI frame holds the
876  * VASurfaceID value itself, so the extra dereference is needed here.
877  * The intptr_t cast widens the VASurfaceID (an unsigned int) to a
878  * pointer-sized integer to avoid a compiler warning. */
879  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
880  break;
881  }
882 #endif
883 #if CONFIG_D3D11VA
884  case AV_HWDEVICE_TYPE_D3D11VA:
885  {
886  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
887  child_data = pair->first;
888  break;
889  }
890 #endif
891 #if CONFIG_DXVA2
892  case AV_HWDEVICE_TYPE_DXVA2:
893  {
894  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
895  child_data = pair->first;
896  break;
897  }
898 #endif
899  default:
900  return AVERROR(ENOSYS);
901  }
902 
903  if (dst->format == child_frames_ctx->format) {
904  ret = ff_hwframe_map_create(s->child_frames_ref,
905  dst, src, NULL, NULL);
906  if (ret < 0)
907  return ret;
908 
909  dst->width = src->width;
910  dst->height = src->height;
911 
912  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
913  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
914  dst->data[0] = pair->first;
915  dst->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
916  } else {
917  dst->data[3] = child_data;
918  }
919 
920  return 0;
921  }
922 
923  desc = av_pix_fmt_desc_get(dst->format);
924  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
925  // This only supports mapping to software.
926  return AVERROR(ENOSYS);
927  }
928 
929  dummy = av_frame_alloc();
930  if (!dummy)
931  return AVERROR(ENOMEM);
932 
933  dummy->buf[0] = av_buffer_ref(src->buf[0]);
934  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
935  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
936  goto fail;
937 
938  dummy->format = child_frames_ctx->format;
939  dummy->width = src->width;
940  dummy->height = src->height;
941 
942  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
943  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
944  dummy->data[0] = pair->first;
945  dummy->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
946  } else {
947  dummy->data[3] = child_data;
948  }
949 
950  ret = av_hwframe_map(dst, dummy, flags);
951 
952 fail:
953  av_frame_free(&dummy);
954 
955  return ret;
956 }
957 
958 static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
959  const AVFrame *src)
960 {
961  QSVFramesContext *s = ctx->internal->priv;
962  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
963  int download = !!src->hw_frames_ctx;
964  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
965 
966  AVFrame *dummy;
967  int ret;
968 
969  dummy = av_frame_alloc();
970  if (!dummy)
971  return AVERROR(ENOMEM);
972 
973  dummy->format = child_frames_ctx->format;
974  dummy->width = src->width;
975  dummy->height = src->height;
976  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
977  dummy->data[3] = surf->Data.MemId;
978  dummy->hw_frames_ctx = s->child_frames_ref;
979 
980  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
981  av_hwframe_transfer_data(dummy, src, 0);
982 
983  dummy->buf[0] = NULL;
984  dummy->data[3] = NULL;
985  dummy->hw_frames_ctx = NULL;
986 
987  av_frame_free(&dummy);
988 
989  return ret;
990 }
991 
992 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
993 {
994  switch (frame->format) {
995  case AV_PIX_FMT_NV12:
996  case AV_PIX_FMT_P010:
997  surface->Data.Y = frame->data[0];
998  surface->Data.UV = frame->data[1];
999  break;
1000 
1001  case AV_PIX_FMT_YUV420P:
1002  surface->Data.Y = frame->data[0];
1003  surface->Data.U = frame->data[1];
1004  surface->Data.V = frame->data[2];
1005  break;
1006 
1007  case AV_PIX_FMT_BGRA:
1008  surface->Data.B = frame->data[0];
1009  surface->Data.G = frame->data[0] + 1;
1010  surface->Data.R = frame->data[0] + 2;
1011  surface->Data.A = frame->data[0] + 3;
1012  break;
1013 #if CONFIG_VAAPI
1014  case AV_PIX_FMT_YUYV422:
1015  surface->Data.Y = frame->data[0];
1016  surface->Data.U = frame->data[0] + 1;
1017  surface->Data.V = frame->data[0] + 3;
1018  break;
1019 
1020  case AV_PIX_FMT_Y210:
1021  surface->Data.Y16 = (mfxU16 *)frame->data[0];
1022  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
1023  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
1024  break;
1025 #endif
1026  default:
1027  return MFX_ERR_UNSUPPORTED;
1028  }
1029  surface->Data.Pitch = frame->linesize[0];
1030  surface->Data.TimeStamp = frame->pts;
1031 
1032  return 0;
1033 }
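/*
 * Layout sketch (editor's note): for packed formats the component pointers are
 * plain byte offsets into data[0]. BGRA is stored as B G R A B G R A ..., so
 * Data.B = data[0], Data.G = data[0] + 1, Data.R = data[0] + 2 and
 * Data.A = data[0] + 3; YUYV422 is stored as Y0 U Y1 V, giving U at +1 and V
 * at +3. In every case Data.Pitch is simply linesize[0].
 */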
1034 
1035 static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
1036 {
1037  QSVFramesContext *s = ctx->internal->priv;
1038  atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
1039  mfxSession *session = upload ? &s->session_upload : &s->session_download;
1040  int ret = 0;
1041 
1042  if (atomic_load(inited))
1043  return 0;
1044 
1045 #if HAVE_PTHREADS
1046  pthread_mutex_lock(&s->session_lock);
1047 #endif
1048 
1049  if (!atomic_load(inited)) {
1050  ret = qsv_init_internal_session(ctx, session, upload);
1051  atomic_store(inited, 1);
1052  }
1053 
1054 #if HAVE_PTHREADS
1055  pthread_mutex_unlock(&s->session_lock);
1056 #endif
1057 
1058  return ret;
1059 }
1060 
1061 static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
1062  const AVFrame *src)
1063 {
1064  QSVFramesContext *s = ctx->internal->priv;
1065  mfxFrameSurface1 out = {{ 0 }};
1066  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
1067 
1068  mfxSyncPoint sync = NULL;
1069  mfxStatus err;
1070  int ret = 0;
1071  /* download to temp frame if the output is not padded as libmfx requires */
1072  AVFrame *tmp_frame = &s->realigned_download_frame;
1073  AVFrame *dst_frame;
1074  int realigned = 0;
1075 
1076  ret = qsv_internal_session_check_init(ctx, 0);
1077  if (ret < 0)
1078  return ret;
1079 
1080  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1081  * Height must be a multiple of 16 for progressive frame sequence and a
1082  * multiple of 32 otherwise.", so align all frames to 16 before downloading. */
1083  if (dst->height & 15 || dst->linesize[0] & 15) {
1084  realigned = 1;
1085  if (tmp_frame->format != dst->format ||
1086  tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
1087  tmp_frame->height != FFALIGN(dst->height, 16)) {
1088  av_frame_unref(tmp_frame);
1089 
1090  tmp_frame->format = dst->format;
1091  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1092  tmp_frame->height = FFALIGN(dst->height, 16);
1093  ret = av_frame_get_buffer(tmp_frame, 0);
1094  if (ret < 0)
1095  return ret;
1096  }
1097  }
1098 
1099  dst_frame = realigned ? tmp_frame : dst;
1100 
1101  if (!s->session_download) {
1102  if (s->child_frames_ref)
1103  return qsv_transfer_data_child(ctx, dst_frame, src);
1104 
1105  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
1106  return AVERROR(ENOSYS);
1107  }
1108 
1109  out.Info = in->Info;
1110  map_frame_to_surface(dst_frame, &out);
1111 
1112  do {
1113  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
1114  if (err == MFX_WRN_DEVICE_BUSY)
1115  av_usleep(1);
1116  } while (err == MFX_WRN_DEVICE_BUSY);
1117 
1118  if (err < 0 || !sync) {
1119  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
1120  return AVERROR_UNKNOWN;
1121  }
1122 
1123  do {
1124  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1125  } while (err == MFX_WRN_IN_EXECUTION);
1126  if (err < 0) {
1127  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1128  return AVERROR_UNKNOWN;
1129  }
1130 
1131  if (realigned) {
1132  tmp_frame->width = dst->width;
1133  tmp_frame->height = dst->height;
1134  ret = av_frame_copy(dst, tmp_frame);
1135  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1136  tmp_frame->height = FFALIGN(dst->height, 16);
1137  if (ret < 0)
1138  return ret;
1139  }
1140 
1141  return 0;
1142 }
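/*
 * Worked example (editor's sketch): downloading into a 1080-line destination
 * whose height or linesize is not 16-aligned takes the realignment path above:
 * the temporary frame is allocated as FFALIGN(dst->linesize[0], 16) x
 * FFALIGN(1080, 16) = ... x 1088 lines, VPP writes into that padded frame, and
 * av_frame_copy() then copies only the caller's 1080 lines back into dst.
 */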
1143 
1144 static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
1145  const AVFrame *src)
1146 {
1147  QSVFramesContext *s = ctx->internal->priv;
1148  mfxFrameSurface1 in = {{ 0 }};
1149  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1150  mfxFrameInfo tmp_info;
1151 
1152  mfxSyncPoint sync = NULL;
1153  mfxStatus err;
1154  int ret = 0;
1155  /* make a copy if the input is not padded as libmfx requires */
1156  AVFrame *tmp_frame = &s->realigned_upload_frame;
1157  const AVFrame *src_frame;
1158  int realigned = 0;
1159 
1160  ret = qsv_internal_session_check_init(ctx, 1);
1161  if (ret < 0)
1162  return ret;
1163 
1164  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1165  * Height must be a multiple of 16 for progressive frame sequence and a
1166  * multiple of 32 otherwise.", so align all frames to 16 before uploading. */
1167  if (src->height & 15 || src->linesize[0] & 15) {
1168  realigned = 1;
1169  if (tmp_frame->format != src->format ||
1170  tmp_frame->width != FFALIGN(src->width, 16) ||
1171  tmp_frame->height != FFALIGN(src->height, 16)) {
1172  av_frame_unref(tmp_frame);
1173 
1174  tmp_frame->format = src->format;
1175  tmp_frame->width = FFALIGN(src->width, 16);
1176  tmp_frame->height = FFALIGN(src->height, 16);
1177  ret = av_frame_get_buffer(tmp_frame, 0);
1178  if (ret < 0)
1179  return ret;
1180  }
1181  ret = av_frame_copy(tmp_frame, src);
1182  if (ret < 0) {
1183  av_frame_unref(tmp_frame);
1184  return ret;
1185  }
1186  ret = qsv_fill_border(tmp_frame, src);
1187  if (ret < 0) {
1188  av_frame_unref(tmp_frame);
1189  return ret;
1190  }
1191 
1192  tmp_info = out->Info;
1193  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1194  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1195  }
1196 
1197  src_frame = realigned ? tmp_frame : src;
1198 
1199  if (!s->session_upload) {
1200  if (s->child_frames_ref)
1201  return qsv_transfer_data_child(ctx, dst, src_frame);
1202 
1203  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1204  return AVERROR(ENOSYS);
1205  }
1206 
1207  in.Info = out->Info;
1208  map_frame_to_surface(src_frame, &in);
1209 
1210  do {
1211  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1212  if (err == MFX_WRN_DEVICE_BUSY)
1213  av_usleep(1);
1214  } while (err == MFX_WRN_DEVICE_BUSY);
1215 
1216  if (err < 0 || !sync) {
1217  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1218  return AVERROR_UNKNOWN;
1219  }
1220 
1221  do {
1222  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1223  } while (err == MFX_WRN_IN_EXECUTION);
1224  if (err < 0) {
1225  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1226  return AVERROR_UNKNOWN;
1227  }
1228 
1229  if (realigned) {
1230  out->Info.CropW = tmp_info.CropW;
1231  out->Info.CropH = tmp_info.CropH;
1232  }
1233 
1234  return 0;
1235 }
1236 
1237 static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
1238  AVHWFramesContext *src_ctx, int flags)
1239 {
1240  QSVFramesContext *s = dst_ctx->internal->priv;
1241  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1242  int i;
1243 
1244  if (src_ctx->initial_pool_size == 0) {
1245  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1246  "mapped to QSV frames.\n");
1247  return AVERROR(EINVAL);
1248  }
1249 
1250  switch (src_ctx->device_ctx->type) {
1251 #if CONFIG_VAAPI
1252  case AV_HWDEVICE_TYPE_VAAPI:
1253  {
1254  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1255  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1256  sizeof(*s->handle_pairs_internal));
1257  if (!s->handle_pairs_internal)
1258  return AVERROR(ENOMEM);
1259  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1260  sizeof(*s->surfaces_internal));
1261  if (!s->surfaces_internal)
1262  return AVERROR(ENOMEM);
1263  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1264  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1265  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1266  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1267  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1268  }
1269  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1270  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1271  }
1272  break;
1273 #endif
1274 #if CONFIG_D3D11VA
1275  case AV_HWDEVICE_TYPE_D3D11VA:
1276  {
1277  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1278  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1279  sizeof(*s->handle_pairs_internal));
1280  if (!s->handle_pairs_internal)
1281  return AVERROR(ENOMEM);
1282  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1283  sizeof(*s->surfaces_internal));
1284  if (!s->surfaces_internal)
1285  return AVERROR(ENOMEM);
1286  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1287  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1288  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1289  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1290  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1291  } else {
1292  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1293  }
1294  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1295  }
1296  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1297  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1298  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1299  } else {
1300  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1301  }
1302  }
1303  break;
1304 #endif
1305 #if CONFIG_DXVA2
1306  case AV_HWDEVICE_TYPE_DXVA2:
1307  {
1308  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1309  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1310  sizeof(*s->handle_pairs_internal));
1311  if (!s->handle_pairs_internal)
1312  return AVERROR(ENOMEM);
1313  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1314  sizeof(*s->surfaces_internal));
1315  if (!s->surfaces_internal)
1316  return AVERROR(ENOMEM);
1317  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1318  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1319  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1320  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1321  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1322  }
1323  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1324  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1325  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1326  else
1327  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1328  }
1329  break;
1330 #endif
1331  default:
1332  return AVERROR(ENOSYS);
1333  }
1334 
1335  dst_hwctx->surfaces = s->surfaces_internal;
1336 
1337  return 0;
1338 }
1339 
1340 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1341  AVFrame *dst, const AVFrame *src, int flags)
1342 {
1343  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1344  int i, err, index = -1;
1345 
1346  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1347  switch(src->format) {
1348 #if CONFIG_VAAPI
1349  case AV_PIX_FMT_VAAPI:
1350  {
1351  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1352  if (*(VASurfaceID*)pair->first == (VASurfaceID)src->data[3]) {
1353  index = i;
1354  break;
1355  }
1356  }
1357 #endif
1358 #if CONFIG_D3D11VA
1359  case AV_PIX_FMT_D3D11:
1360  {
1361  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1362  if (pair->first == src->data[0]
1363  && (pair->second == src->data[1]
1364  || (pair->second == (mfxMemId)MFX_INFINITE && src->data[1] == (uint8_t *)0))) {
1365  index = i;
1366  break;
1367  }
1368  }
1369 #endif
1370 #if CONFIG_DXVA2
1371  case AV_PIX_FMT_DXVA2_VLD:
1372  {
1373  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1374  if (pair->first == src->data[3]) {
1375  index = i;
1376  break;
1377  }
1378  }
1379 #endif
1380  }
1381  }
1382  if (index < 0) {
1383  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1384  "is not in the mapped frames context.\n");
1385  return AVERROR(EINVAL);
1386  }
1387 
1388  err = ff_hwframe_map_create(dst->hw_frames_ctx,
1389  dst, src, NULL, NULL);
1390  if (err)
1391  return err;
1392 
1393  dst->width = src->width;
1394  dst->height = src->height;
1395  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1396 
1397  return 0;
1398 }
1399 
1400 static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
1401  const void *hwconfig,
1402  AVHWFramesConstraints *constraints)
1403 {
1404  int i;
1405 
1406  constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
1407  sizeof(*constraints->valid_sw_formats));
1408  if (!constraints->valid_sw_formats)
1409  return AVERROR(ENOMEM);
1410 
1411  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1412  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1413  constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
1414 
1415  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1416  if (!constraints->valid_hw_formats)
1417  return AVERROR(ENOMEM);
1418 
1419  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1420  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1421 
1422  return 0;
1423 }
1424 
1425 static void qsv_device_free(AVHWDeviceContext *ctx)
1426 {
1427  AVQSVDeviceContext *hwctx = ctx->hwctx;
1428  QSVDevicePriv *priv = ctx->user_opaque;
1429 
1430  if (hwctx->session)
1431  MFXClose(hwctx->session);
1432 
1433  av_buffer_unref(&priv->child_device_ctx);
1434  av_freep(&priv);
1435 }
1436 
1437 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1438 {
1439  static const struct {
1440  const char *name;
1441  mfxIMPL impl;
1442  } impl_map[] = {
1443  { "auto", MFX_IMPL_AUTO },
1444  { "sw", MFX_IMPL_SOFTWARE },
1445  { "hw", MFX_IMPL_HARDWARE },
1446  { "auto_any", MFX_IMPL_AUTO_ANY },
1447  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1448  { "hw2", MFX_IMPL_HARDWARE2 },
1449  { "hw3", MFX_IMPL_HARDWARE3 },
1450  { "hw4", MFX_IMPL_HARDWARE4 },
1451  };
1452 
1453  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1454  int i;
1455 
1456  if (device) {
1457  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1458  if (!strcmp(device, impl_map[i].name)) {
1459  impl = impl_map[i].impl;
1460  break;
1461  }
1462  if (i == FF_ARRAY_ELEMS(impl_map))
1463  impl = strtol(device, NULL, 0);
1464  }
1465 
1466  if (impl != MFX_IMPL_SOFTWARE) {
1467  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
1468  impl |= MFX_IMPL_VIA_D3D11;
1469  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
1470  impl |= MFX_IMPL_VIA_D3D9;
1471  }
1472 
1473  return impl;
1474 }
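/*
 * Illustrative only (editor's note): 'device' is the user-supplied device
 * string. "hw2", for example, selects MFX_IMPL_HARDWARE2 (the second hardware
 * adapter), while any string not found in the table is parsed with strtol()
 * and used as a raw mfxIMPL value. For non-software implementations the
 * VIA_D3D11 or VIA_D3D9 bits are then OR-ed in according to the child device
 * type on Windows.
 */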
1475 
1476 static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
1477  mfxIMPL implementation,
1478  AVHWDeviceContext *child_device_ctx,
1479  int flags)
1480 {
1481  AVQSVDeviceContext *hwctx = ctx->hwctx;
1482 
1483  mfxVersion ver = { { 3, 1 } };
1484  mfxHDL handle;
1485  mfxHandleType handle_type;
1486  mfxStatus err;
1487  int ret;
1488 
1489  switch (child_device_ctx->type) {
1490 #if CONFIG_VAAPI
1491  case AV_HWDEVICE_TYPE_VAAPI:
1492  {
1493  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1494  handle_type = MFX_HANDLE_VA_DISPLAY;
1495  handle = (mfxHDL)child_device_hwctx->display;
1496  }
1497  break;
1498 #endif
1499 #if CONFIG_D3D11VA
1500  case AV_HWDEVICE_TYPE_D3D11VA:
1501  {
1502  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1503  handle_type = MFX_HANDLE_D3D11_DEVICE;
1504  handle = (mfxHDL)child_device_hwctx->device;
1505  }
1506  break;
1507 #endif
1508 #if CONFIG_DXVA2
1509  case AV_HWDEVICE_TYPE_DXVA2:
1510  {
1511  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1512  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1513  handle = (mfxHDL)child_device_hwctx->devmgr;
1514  }
1515  break;
1516 #endif
1517  default:
1518  ret = AVERROR(ENOSYS);
1519  goto fail;
1520  }
1521 
1522  err = MFXInit(implementation, &ver, &hwctx->session);
1523  if (err != MFX_ERR_NONE) {
1524  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1525  "%d.\n", err);
1526  ret = AVERROR_UNKNOWN;
1527  goto fail;
1528  }
1529 
1530  err = MFXQueryVersion(hwctx->session, &ver);
1531  if (err != MFX_ERR_NONE) {
1532  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1533  ret = AVERROR_UNKNOWN;
1534  goto fail;
1535  }
1536 
1538  "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1539  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1540 
1541  MFXClose(hwctx->session);
1542 
1543  err = MFXInit(implementation, &ver, &hwctx->session);
1544  if (err != MFX_ERR_NONE) {
1546  "Error initializing an MFX session: %d.\n", err);
1547  ret = AVERROR_UNKNOWN;
1548  goto fail;
1549  }
1550 
1551  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1552  if (err != MFX_ERR_NONE) {
1553  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1554  "%d\n", err);
1555  ret = AVERROR_UNKNOWN;
1556  goto fail;
1557  }
1558 
1559  return 0;
1560 
1561 fail:
1562  if (hwctx->session)
1563  MFXClose(hwctx->session);
1564  return ret;
1565 }
1566 
1567 static int qsv_device_derive(AVHWDeviceContext *ctx,
1568  AVHWDeviceContext *child_device_ctx,
1569  AVDictionary *opts, int flags)
1570 {
1571  mfxIMPL impl;
1572  impl = choose_implementation("hw_any", child_device_ctx->type);
1573  return qsv_device_derive_from_child(ctx, impl,
1574  child_device_ctx, flags);
1575 }
1576 
1577 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1578  AVDictionary *opts, int flags)
1579 {
1580  QSVDevicePriv *priv;
1581  enum AVHWDeviceType child_device_type;
1582  AVHWDeviceContext *child_device;
1583  AVDictionary *child_device_opts;
1584  AVDictionaryEntry *e;
1585 
1586  mfxIMPL impl;
1587  int ret;
1588 
1589  priv = av_mallocz(sizeof(*priv));
1590  if (!priv)
1591  return AVERROR(ENOMEM);
1592 
1593  ctx->user_opaque = priv;
1594  ctx->free = qsv_device_free;
1595 
1596  e = av_dict_get(opts, "child_device_type", NULL, 0);
1597  if (e) {
1598  child_device_type = av_hwdevice_find_type_by_name(e->value);
1599  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
1600  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
1601  "\"%s\".\n", e->value);
1602  return AVERROR(EINVAL);
1603  }
1604  } else if (CONFIG_VAAPI) {
1605  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1606  } else if (CONFIG_DXVA2) {
1608  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
1609  "with old commandlines. This behaviour will be removed "
1610  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
1611  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1612  } else if (CONFIG_D3D11VA) {
1613  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
1614  } else {
1615  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1616  return AVERROR(ENOSYS);
1617  }
1618 
1619  child_device_opts = NULL;
1620  switch (child_device_type) {
1621 #if CONFIG_VAAPI
1622  case AV_HWDEVICE_TYPE_VAAPI:
1623  {
1624  // libmfx does not actually implement VAAPI properly, rather it
1625  // depends on the specific behaviour of a matching iHD driver when
1626  // used on recent Intel hardware. Set options to the VAAPI device
1627  // creation so that we should pick a usable setup by default if
1628  // possible, even when multiple devices and drivers are available.
1629  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
1630  av_dict_set(&child_device_opts, "driver", "iHD", 0);
1631  }
1632  break;
1633 #endif
1634 #if CONFIG_D3D11VA
1635  case AV_HWDEVICE_TYPE_D3D11VA:
1636  break;
1637 #endif
1638 #if CONFIG_DXVA2
1639  case AV_HWDEVICE_TYPE_DXVA2:
1640  break;
1641 #endif
1642  default:
1643  {
1644  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1645  return AVERROR(ENOSYS);
1646  }
1647  break;
1648  }
1649 
1650  e = av_dict_get(opts, "child_device", NULL, 0);
1651  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1652  e ? e->value : NULL, child_device_opts, 0);
1653 
1654  av_dict_free(&child_device_opts);
1655  if (ret < 0)
1656  return ret;
1657 
1658  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1659 
1660  impl = choose_implementation(device, child_device_type);
1661 
1662  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1663 }
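/*
 * Minimal usage sketch (editor's addition; the device path and option values
 * below are examples only):
 *
 *     AVBufferRef *qsv_ref = NULL;
 *     AVDictionary *opts = NULL;
 *     av_dict_set(&opts, "child_device_type", "vaapi", 0);
 *     av_dict_set(&opts, "child_device", "/dev/dri/renderD128", 0);
 *     int err = av_hwdevice_ctx_create(&qsv_ref, AV_HWDEVICE_TYPE_QSV,
 *                                      "hw", opts, 0);
 *     av_dict_free(&opts);
 */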
1664 
1667  .name = "QSV",
1668 
1669  .device_hwctx_size = sizeof(AVQSVDeviceContext),
1670  .device_priv_size = sizeof(QSVDeviceContext),
1671  .frames_hwctx_size = sizeof(AVQSVFramesContext),
1672  .frames_priv_size = sizeof(QSVFramesContext),
1673 
1674  .device_create = qsv_device_create,
1675  .device_derive = qsv_device_derive,
1676  .device_init = qsv_device_init,
1677  .frames_get_constraints = qsv_frames_get_constraints,
1678  .frames_init = qsv_frames_init,
1679  .frames_uninit = qsv_frames_uninit,
1680  .frames_get_buffer = qsv_get_buffer,
1681  .transfer_get_formats = qsv_transfer_get_formats,
1682  .transfer_data_to = qsv_transfer_data_to,
1683  .transfer_data_from = qsv_transfer_data_from,
1684  .map_to = qsv_map_to,
1685  .map_from = qsv_map_from,
1686  .frames_derive_to = qsv_frames_derive_to,
1687  .frames_derive_from = qsv_frames_derive_from,
1688 
1689  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1690 };
formats
formats
Definition: signature.h:48
pthread_mutex_t
_fmutex pthread_mutex_t
Definition: os2threads.h:53
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
qsv_transfer_data_child
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:958
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
atomic_store
#define atomic_store(object, desired)
Definition: stdatomic.h:85
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:254
comp
static void comp(unsigned char *dst, ptrdiff_t dst_stride, unsigned char *src, ptrdiff_t src_stride, int add)
Definition: eamad.c:86
QSVFramesContext::child_frames_ref
AVBufferRef * child_frames_ref
Definition: hwcontext_qsv.c:82
qsv_transfer_data_to
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1144
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2662
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
supported_pixel_formats
static const struct @311 supported_pixel_formats[]
pthread_mutex_init
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:104
qsv_map_from
static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:855
qsv_fourcc_from_pix_fmt
static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
Definition: hwcontext_qsv.c:148
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
qsv_fill_border
static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:175
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:111
QSVDeviceContext::ver
mfxVersion ver
Definition: hwcontext_qsv.c:66
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:334
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
pixdesc.h
AVFrame::width
int width
Definition: frame.h:397
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
AVD3D11VAFramesContext::MiscFlags
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
Definition: hwcontext_d3d11va.h:166
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:248
qsv_device_derive
static int qsv_device_derive(AVHWDeviceContext *ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1567
AVDXVA2FramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_dxva2.h:46
qsv_frames_derive_from
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:783
AV_HWDEVICE_TYPE_NONE
@ AV_HWDEVICE_TYPE_NONE
Definition: hwcontext.h:28
av_hwframe_map
int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
Map a hardware frame.
Definition: hwcontext.c:790
qsv_init_surface
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
Definition: hwcontext_qsv.c:474
data
const char data[16]
Definition: mxf.c:143
atomic_int
intptr_t atomic_int
Definition: stdatomic.h:55
choose_implementation
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
Definition: hwcontext_qsv.c:1437
QSVDeviceContext
Definition: hwcontext_qsv.c:63
av_hwdevice_find_type_by_name
enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
Look up an AVHWDeviceType by name.
Definition: hwcontext.c:83
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
AVHWFramesContext::internal
AVHWFramesInternal * internal
Private data used internally by libavutil.
Definition: hwcontext.h:134
AVDictionary
Definition: dict.c:30
ff_hwframe_map_create
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
Definition: hwcontext.c:738
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:458
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
fourcc
uint32_t fourcc
Definition: hwcontext_qsv.c:100
av_hwdevice_ctx_init
int av_hwdevice_ctx_init(AVBufferRef *ref)
Finalize the device context before use.
Definition: hwcontext.c:201
AVFrame::buf
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:525
QSVDeviceContext::handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:65
qsv_transfer_data_from
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_qsv.c:1061
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:453
QSVDevicePriv
Definition: hwcontext_qsv.c:59
AVD3D11VAFramesContext::BindFlags
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
Definition: hwcontext_d3d11va.h:160
AVVAAPIFramesContext::surface_ids
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
Definition: hwcontext_vaapi.h:101
AVHWFramesInternal::priv
void * priv
Definition: hwcontext_internal.h:116
AVD3D11FrameDescriptor::texture
ID3D11Texture2D * texture
The texture in which the frame is located.
Definition: hwcontext_d3d11va.h:117
QSVDeviceContext::child_device_type
enum AVHWDeviceType child_device_type
Definition: hwcontext_qsv.c:69
qsv_init_child_ctx
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:329
fail
#define fail()
Definition: checkasm.h:131
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
dummy
int dummy
Definition: motion.c:65
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
qsv_frames_get_constraints
static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_qsv.c:1400
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
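A sketch of the custom-allocator pool pattern used in this file (qsv_init_pool() pairs av_buffer_pool_init2() with qsv_pool_alloc()); my_alloc and my_opaque are hypothetical names, not symbols from this file.
static AVBufferRef *my_alloc(void *opaque, size_t size)
{
    /* a real allocator would hand out entries owned by "opaque" */
    return av_buffer_alloc(size);
}
/* later, e.g. while initializing a frames context: */
AVBufferPool *pool = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                          my_opaque, my_alloc, NULL);
AVBufferRef  *buf  = av_buffer_pool_get(pool); /* returned buffers are recycled */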
QSVFramesContext::session_download_init
atomic_int session_download_init
Definition: hwcontext_qsv.c:75
qsv_frames_derive_to
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, AVHWFramesContext *src_ctx, int flags)
Definition: hwcontext_qsv.c:1237
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:99
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:583
AV_PIX_FMT_Y210
#define AV_PIX_FMT_Y210
Definition: pixfmt.h:458
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
av_hwdevice_ctx_alloc
AVBufferRef * av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
Allocate an AVHWDeviceContext for a given hardware type.
Definition: hwcontext.c:143
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:465
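As a sketch, the constraint lists can be queried from a QSV device before configuring a pool; device_ref is an assumed existing device reference, and av_hwdevice_get_hwframe_constraints()/av_hwframe_constraints_free() are the public accessors from hwcontext.h.
AVHWFramesConstraints *cst = av_hwdevice_get_hwframe_constraints(device_ref, NULL);
if (cst) {
    for (int i = 0; cst->valid_sw_formats &&
                    cst->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++)
        av_log(NULL, AV_LOG_VERBOSE, "supported sw_format: %s\n",
               av_get_pix_fmt_name(cst->valid_sw_formats[i]));
    av_hwframe_constraints_free(&cst);
}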
QSVFramesContext::ext_buffers
mfxExtBuffer * ext_buffers[1]
Definition: hwcontext_qsv.c:93
QSVFramesContext::session_upload_init
atomic_int session_upload_init
Definition: hwcontext_qsv.c:77
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: hwcontext_qsv.c:555
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
av_memcpy_backptr
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
Overlapping memcpy() implementation.
Definition: mem.c:455
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:387
QSVDevicePriv::child_device_ctx
AVBufferRef * child_device_ctx
Definition: hwcontext_qsv.c:60
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:127
s
#define s(width, name)
Definition: cbs_vp9.c:256
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:50
AVHWDeviceType
AVHWDeviceType
Definition: hwcontext.h:27
QSVDeviceContext::handle
mfxHDL handle
Definition: hwcontext_qsv.c:64
QSVFramesContext::mem_ids
mfxMemId * mem_ids
Definition: hwcontext_qsv.c:88
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AVDXVA2FramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_dxva2.h:59
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
atomic_load
#define atomic_load(object)
Definition: stdatomic.h:93
ff_hwcontext_type_qsv
const HWContextType ff_hwcontext_type_qsv
Definition: hwcontext_qsv.c:1665
opts
AVDictionary * opts
Definition: movenc.c:50
AVD3D11VAFramesContext::texture_infos
AVD3D11FrameDescriptor * texture_infos
If the texture structure member above is not NULL, contains the same texture pointer for all eleme...
Definition: hwcontext_d3d11va.h:175
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
qsv_frames_uninit
static void qsv_frames_uninit(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:279
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVComponentDescriptor
Definition: pixdesc.h:30
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: hwcontext_qsv.c:57
qsv_internal_session_check_init
static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
Definition: hwcontext_qsv.c:1035
qsv_frames_init
static int qsv_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_qsv.c:686
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:212
map_frame_to_surface
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
Definition: hwcontext_qsv.c:992
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:593
index
int index
Definition: gxfenc.c:89
pthread_mutex_unlock
#define pthread_mutex_unlock(a)
Definition: ffprobe.c:77
AVD3D11VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d11va.h:131
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
QSVFramesContext::realigned_upload_frame
AVFrame realigned_upload_frame
Definition: hwcontext_qsv.c:94
qsv_init_internal_session
static int qsv_init_internal_session(AVHWFramesContext *ctx, mfxSession *session, int upload)
Definition: hwcontext_qsv.c:610
hwcontext_dxva2.h
QSVFramesContext::opaque_alloc
mfxExtOpaqueSurfaceAlloc opaque_alloc
Definition: hwcontext_qsv.c:92
qsv_get_buffer
static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_qsv.c:751
AVDXVA2FramesContext::surface_type
DWORD surface_type
The surface type (e.g.
Definition: hwcontext_dxva2.h:51
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:764
size
int size
Definition: twinvq_data.h:10344
QSVFramesContext::nb_surfaces_used
int nb_surfaces_used
Definition: hwcontext_qsv.c:85
qsv_device_free
static void qsv_device_free(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:1425
AVFrame::format
int format
format of the frame, -1 if unknown or unset. Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:412
ff_qsv_get_surface_base_handle
int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf, enum AVHWDeviceType base_dev_type, void **base_handle)
Caller needs to allocate enough space for base_handle pointer.
Definition: hwcontext_qsv.c:121
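A sketch of the helper's calling convention: the caller supplies storage for up to two handles (a D3D11 device fills both the texture and the array index, VAAPI and DXVA2 fill only the first); surf is assumed to be an mfxFrameSurface1 from an AVQSVFramesContext backed by a VAAPI child device.
void *base_handle[2] = { NULL, NULL };
int ret = ff_qsv_get_surface_base_handle(surf, AV_HWDEVICE_TYPE_VAAPI,
                                         base_handle);
if (ret == 0) {
    /* for VAAPI, base_handle[0] carries the VASurfaceID (stored as a pointer) */
}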
qsv_transfer_get_formats
static int qsv_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_qsv.c:765
buffer.h
qsv_device_derive_from_child
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, mfxIMPL implementation, AVHWDeviceContext *child_device_ctx, int flags)
Definition: hwcontext_qsv.c:1476
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:305
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: hwcontext_qsv.c:598
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
AV_HWDEVICE_TYPE_VAAPI
@ AV_HWDEVICE_TYPE_VAAPI
Definition: hwcontext.h:31
pthread_mutex_destroy
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:112
av_image_get_linesize
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane.
Definition: imgutils.c:76
hwcontext_qsv.h
qsv_device_init
static int qsv_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_qsv.c:223
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
common.h
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
QSVFramesContext::handle_pairs_internal
mfxHDLPair * handle_pairs_internal
Definition: hwcontext_qsv.c:84
AVD3D11FrameDescriptor::index
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
Definition: hwcontext_d3d11va.h:125
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
QSVFramesContext::surface_ptrs
mfxFrameSurface1 ** surface_ptrs
Definition: hwcontext_qsv.c:90
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:477
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:264
QSVFramesContext::session_download
mfxSession session_download
Definition: hwcontext_qsv.c:74
AVDXVA2FramesContext::surfaces
IDirect3DSurface9 ** surfaces
The surface pool.
Definition: hwcontext_dxva2.h:58
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:272
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:415
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
hwcontext_vaapi.h
qsv_map_to
static int qsv_map_to(AVHWFramesContext *dst_ctx, AVFrame *dst, const AVFrame *src, int flags)
Definition: hwcontext_qsv.c:1340
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_qsv.c:99
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
Cross-reference into the filter design notes; see doc/filter_design.txt for the discussion of frame queueing and request_frame.
Definition: filter_design.txt:264
AVHWFramesContext::device_ctx
AVHWDeviceContext * device_ctx
The parent AVHWDeviceContext.
Definition: hwcontext.h:149
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:611
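A minimal sketch of opening a QSV device; passing NULL as the device string lets the implementation pick a default child device (VAAPI, D3D11VA or DXVA2, depending on the build).
AVBufferRef *device_ref = NULL;
int ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_QSV,
                                 NULL, NULL, 0);
if (ret < 0)
    return ret;
/* ... use the device, then release the reference ... */
av_buffer_unref(&device_ref);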
QSVDeviceContext::impl
mfxIMPL impl
Definition: hwcontext_qsv.c:67
QSVFramesContext::realigned_download_frame
AVFrame realigned_download_frame
Definition: hwcontext_qsv.c:95
av_hwframe_transfer_data
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
Copy data to or from a hw surface.
Definition: hwcontext.c:444
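A sketch of the upload direction: sw_frame is an assumed software frame matching the pool's sw_format, frames_ref the frames context from the earlier sketch, and av_hwframe_get_buffer() is the public companion declared in hwcontext.h.
AVFrame *hw_frame = av_frame_alloc();
if (!hw_frame)
    return AVERROR(ENOMEM);
int ret = av_hwframe_get_buffer(frames_ref, hw_frame, 0); /* take a surface from the pool */
if (ret >= 0)
    ret = av_hwframe_transfer_data(hw_frame, sw_frame, 0); /* copy CPU data into it */
if (ret < 0)
    av_frame_free(&hw_frame);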
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: hwcontext_qsv.c:588
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:659
AV_HWDEVICE_TYPE_QSV
@ AV_HWDEVICE_TYPE_QSV
Definition: hwcontext.h:33
qsv_pool_release_dummy
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
Definition: hwcontext_qsv.c:310
AVFrame::height
int height
Definition: frame.h:397
QSVDeviceContext::child_pix_fmt
enum AVPixelFormat child_pix_fmt
Definition: hwcontext_qsv.c:70
AVVAAPIFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_vaapi.h:102
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
QSVFramesContext::session_upload
mfxSession session_upload
Definition: hwcontext_qsv.c:76
qsv_device_create
static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_qsv.c:1577
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:455
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
desc
const char * desc
Definition: libsvtav1.c:83
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
hwcontext_internal.h
AVVAAPIFramesContext
VAAPI-specific data associated with a frame pool.
Definition: hwcontext_vaapi.h:88
QSVFramesContext::surfaces_internal
mfxFrameSurface1 * surfaces_internal
Definition: hwcontext_qsv.c:83
AVDictionaryEntry
Definition: dict.h:79
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVFramesContext
Definition: hwcontext_qsv.c:73
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
imgutils.h
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:561
hwcontext.h
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:370
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
qsv_pool_alloc
static AVBufferRef * qsv_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_qsv.c:314
ID3D11Device
void ID3D11Device
Definition: nvenc.h:28
AVVAAPIDeviceContext
VAAPI connection details.
Definition: hwcontext_vaapi.h:68
AVDictionaryEntry::value
char * value
Definition: dict.h:81
hwcontext_d3d11va.h
qsv_init_pool
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
Definition: hwcontext_qsv.c:510
pthread_mutex_lock
#define pthread_mutex_lock(a)
Definition: ffprobe.c:73