FFmpeg
qsvvpp.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * Intel Quick Sync Video VPP base function
22  */
23 
24 #include "libavutil/common.h"
25 #include "libavutil/mathematics.h"
26 #include "libavutil/time.h"
27 #include "libavutil/pixdesc.h"
28 
29 #include "internal.h"
30 #include "qsvvpp.h"
31 #include "video.h"
32 
33 #if QSV_ONEVPL
34 #include <mfxdispatcher.h>
35 #else
36 #define MFXUnload(a) do { } while(0)
37 #endif
38 
/* Memory-type predicates used to classify mfx memory modes. Arguments are
 * parenthesized so the macros expand safely for any caller expression. */
#define IS_VIDEO_MEMORY(mode) ((mode) & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                         MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#if QSV_HAVE_OPAQUE
#define IS_OPAQUE_MEMORY(mode) ((mode) & MFX_MEMTYPE_OPAQUE_FRAME)
#endif
#define IS_SYSTEM_MEMORY(mode) ((mode) & MFX_MEMTYPE_SYSTEM_MEMORY)
/* Extract the "via" bits (D3D9/D3D11/VAAPI) of an mfxIMPL value. */
#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))

/* The MFX audio API exists only in legacy libmfx, not in oneVPL.
 * Parenthesized so the expansion is safe inside larger expressions. */
#define QSV_HAVE_AUDIO (!QSV_ONEVPL)
48 
/* 90 kHz timebase: mfx surface timestamps (Data.TimeStamp) are expressed in
 * this unit, so PTS values are rescaled to/from it around the VPP calls. */
static const AVRational default_tb = { 1, 90000 };
50 
51 typedef struct QSVAsyncFrame {
52  mfxSyncPoint sync;
53  QSVFrame *frame;
55 
56 static const struct {
58  const char *desc;
59 } qsv_iopatterns[] = {
60  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
61  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
62 #if QSV_HAVE_OPAQUE
63  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
64 #endif
65  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
66  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
67 #if QSV_HAVE_OPAQUE
68  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
69 #endif
70 };
71 
73  const char *extra_string)
74 {
75  const char *desc = NULL;
76 
77  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
79  desc = qsv_iopatterns[i].desc;
80  }
81  }
82  if (!desc)
83  desc = "unknown iopattern";
84 
85  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
86  return 0;
87 }
88 
/* Translation table from mfxStatus codes to (AVERROR, description) pairs.
 * Warnings (MFX_WRN_*) map to averr 0 since they are not failures. */
static const struct {
    mfxStatus mfxerr;
    int averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                              },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"                        },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"                         },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"                          },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory"            },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer"     },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"                       },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block"      },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"                      },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found"       },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input"            },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output"        },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output"      },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"                    },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"                          },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters"        },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters"             },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"                   },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"                        },
#if QSV_HAVE_AUDIO
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters"        },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters"             },
#endif
    { MFX_ERR_GPU_HANG,                 AVERROR(EIO),    "GPU Hang"                             },
    { MFX_ERR_REALLOC_SURFACE,          AVERROR_UNKNOWN, "need bigger surface for output"       },

    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"               },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"                          },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed"             },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"                 },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters"        },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"                   },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"                   },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"                       },
#if QSV_HAVE_AUDIO
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters"        },
#endif

#if QSV_VERSION_ATLEAST(1, 31)
    { MFX_ERR_NONE_PARTIAL_OUTPUT,      0,               "partial output"                       },
#endif
};
138 
139 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
140 {
141  int i;
142  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
143  if (qsv_errors[i].mfxerr == mfx_err) {
144  if (desc)
145  *desc = qsv_errors[i].desc;
146  return qsv_errors[i].averr;
147  }
148  }
149  if (desc)
150  *desc = "unknown error";
151  return AVERROR_UNKNOWN;
152 }
153 
154 int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
155  const char *error_string)
156 {
157  const char *desc;
158  int ret;
159  ret = qsv_map_error(err, &desc);
160  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
161  return ret;
162 }
163 
164 int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
165  const char *warning_string)
166 {
167  const char *desc;
168  int ret;
169  ret = qsv_map_error(err, &desc);
170  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
171  return ret;
172 }
173 
174 /* functions for frameAlloc */
175 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
176  mfxFrameAllocResponse *resp)
177 {
178  QSVVPPContext *s = pthis;
179  int i;
180 
181  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
182  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
183  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
184  return MFX_ERR_UNSUPPORTED;
185 
186  if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
187  resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
188  if (!resp->mids)
189  return AVERROR(ENOMEM);
190 
191  for (i = 0; i < s->nb_surface_ptrs_in; i++)
192  resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
193 
194  resp->NumFrameActual = s->nb_surface_ptrs_in;
195  } else {
196  resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
197  if (!resp->mids)
198  return AVERROR(ENOMEM);
199 
200  for (i = 0; i < s->nb_surface_ptrs_out; i++)
201  resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
202 
203  resp->NumFrameActual = s->nb_surface_ptrs_out;
204  }
205 
206  return MFX_ERR_NONE;
207 }
208 
/* mfxFrameAllocator.Free callback: release only the MemId table built by
 * frame_alloc(); the surfaces themselves are owned elsewhere. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}
214 
/* mfxFrameAllocator.Lock callback: this allocator is only installed for
 * video memory (see init_vpp_session), so CPU mapping is not supported. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
219 
/* mfxFrameAllocator.Unlock callback: unsupported, matching frame_lock(). */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
224 
225 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
226 {
227  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
228  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
229 
230  pair_dst->first = pair_src->first;
231 
232  if (pair_src->second != (mfxMemId)MFX_INFINITE)
233  pair_dst->second = pair_src->second;
234  return MFX_ERR_NONE;
235 }
236 
238 {
239  switch (format) {
240  case AV_PIX_FMT_YUV420P:
241  return MFX_FOURCC_YV12;
242  case AV_PIX_FMT_NV12:
243  return MFX_FOURCC_NV12;
244  case AV_PIX_FMT_YUYV422:
245  return MFX_FOURCC_YUY2;
246  case AV_PIX_FMT_BGRA:
247  return MFX_FOURCC_RGB4;
248  case AV_PIX_FMT_P010:
249  return MFX_FOURCC_P010;
250 #if CONFIG_VAAPI
251  case AV_PIX_FMT_UYVY422:
252  return MFX_FOURCC_UYVY;
253 #endif
254  }
255 
256  return MFX_FOURCC_NV12;
257 }
258 
259 static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
260 {
261  switch (frame->format) {
262  case AV_PIX_FMT_NV12:
263  case AV_PIX_FMT_P010:
264  surface->Data.Y = frame->data[0];
265  surface->Data.UV = frame->data[1];
266  break;
267  case AV_PIX_FMT_YUV420P:
268  surface->Data.Y = frame->data[0];
269  surface->Data.U = frame->data[1];
270  surface->Data.V = frame->data[2];
271  break;
272  case AV_PIX_FMT_YUYV422:
273  surface->Data.Y = frame->data[0];
274  surface->Data.U = frame->data[0] + 1;
275  surface->Data.V = frame->data[0] + 3;
276  break;
277  case AV_PIX_FMT_RGB32:
278  surface->Data.B = frame->data[0];
279  surface->Data.G = frame->data[0] + 1;
280  surface->Data.R = frame->data[0] + 2;
281  surface->Data.A = frame->data[0] + 3;
282  break;
283  case AV_PIX_FMT_UYVY422:
284  surface->Data.Y = frame->data[0] + 1;
285  surface->Data.U = frame->data[0];
286  surface->Data.V = frame->data[0] + 2;
287  break;
288  default:
289  return MFX_ERR_UNSUPPORTED;
290  }
291  surface->Data.Pitch = frame->linesize[0];
292 
293  return 0;
294 }
295 
296 /* fill the surface info */
297 static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
298 {
299  enum AVPixelFormat pix_fmt;
300  AVHWFramesContext *frames_ctx;
301  AVQSVFramesContext *frames_hwctx;
302  const AVPixFmtDescriptor *desc;
303 
304  if (link->format == AV_PIX_FMT_QSV) {
305  if (!link->hw_frames_ctx)
306  return AVERROR(EINVAL);
307 
308  frames_ctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
309  frames_hwctx = frames_ctx->hwctx;
310  *frameinfo = frames_hwctx->surfaces[0].Info;
311  } else {
312  pix_fmt = link->format;
314  if (!desc)
315  return AVERROR_BUG;
316 
317  frameinfo->CropX = 0;
318  frameinfo->CropY = 0;
319  frameinfo->Width = FFALIGN(link->w, 32);
320  frameinfo->Height = FFALIGN(link->h, 32);
321  frameinfo->PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
322  frameinfo->FourCC = pix_fmt_to_mfx_fourcc(pix_fmt);
323  frameinfo->BitDepthLuma = desc->comp[0].depth;
324  frameinfo->BitDepthChroma = desc->comp[0].depth;
325  frameinfo->Shift = desc->comp[0].depth > 8;
326  if (desc->log2_chroma_w && desc->log2_chroma_h)
327  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
328  else if (desc->log2_chroma_w)
329  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
330  else
331  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
332  }
333 
334  frameinfo->CropW = link->w;
335  frameinfo->CropH = link->h;
336  frameinfo->FrameRateExtN = link->frame_rate.num;
337  frameinfo->FrameRateExtD = link->frame_rate.den;
338 
339  /* Apparently VPP in the SDK requires the frame rate to be set to some value, otherwise
340  * init will fail */
341  if (frameinfo->FrameRateExtD == 0 || frameinfo->FrameRateExtN == 0) {
342  frameinfo->FrameRateExtN = 25;
343  frameinfo->FrameRateExtD = 1;
344  }
345 
346  frameinfo->AspectRatioW = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
347  frameinfo->AspectRatioH = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;
348 
349  return 0;
350 }
351 
353 {
354  while (list) {
355  /* list->queued==1 means the frame is not cached in VPP
356  * process any more, it can be released to pool. */
357  if ((list->queued == 1) && !list->surface.Data.Locked) {
358  av_frame_free(&list->frame);
359  list->queued = 0;
360  }
361  list = list->next;
362  }
363 }
364 
366 {
367  while (*list) {
368  QSVFrame *frame;
369 
370  frame = *list;
371  *list = (*list)->next;
372  av_frame_free(&frame->frame);
373  av_freep(&frame);
374  }
375 }
376 
378 {
379  QSVFrame *out = *list;
380 
381  for (; out; out = out->next) {
382  if (!out->queued) {
383  out->queued = 1;
384  break;
385  }
386  }
387 
388  if (!out) {
389  out = av_mallocz(sizeof(*out));
390  if (!out) {
391  av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
392  return NULL;
393  }
394  out->queued = 1;
395  out->next = *list;
396  *list = out;
397  }
398 
399  return out;
400 }
401 
402 /* get the input surface */
404 {
405  QSVFrame *qsv_frame;
406  AVFilterContext *ctx = inlink->dst;
407 
408  clear_unused_frames(s->in_frame_list);
409 
410  qsv_frame = get_free_frame(&s->in_frame_list);
411  if (!qsv_frame)
412  return NULL;
413 
414  /* Turn AVFrame into mfxFrameSurface1.
415  * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
416  * mfxFrameSurface1 is stored in AVFrame->data[3];
417  * for system memory mode, raw video data is stored in
418  * AVFrame, we should map it into mfxFrameSurface1.
419  */
420  if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
421  if (picref->format != AV_PIX_FMT_QSV) {
422  av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
423  return NULL;
424  }
425  qsv_frame->frame = av_frame_clone(picref);
426  qsv_frame->surface = *(mfxFrameSurface1 *)qsv_frame->frame->data[3];
427  } else {
428  /* make a copy if the input is not padded as libmfx requires */
429  if (picref->height & 31 || picref->linesize[0] & 31) {
430  qsv_frame->frame = ff_get_video_buffer(inlink,
431  FFALIGN(inlink->w, 32),
432  FFALIGN(inlink->h, 32));
433  if (!qsv_frame->frame)
434  return NULL;
435 
436  qsv_frame->frame->width = picref->width;
437  qsv_frame->frame->height = picref->height;
438 
439  if (av_frame_copy(qsv_frame->frame, picref) < 0) {
440  av_frame_free(&qsv_frame->frame);
441  return NULL;
442  }
443 
444  if (av_frame_copy_props(qsv_frame->frame, picref) < 0) {
445  av_frame_free(&qsv_frame->frame);
446  return NULL;
447  }
448  } else
449  qsv_frame->frame = av_frame_clone(picref);
450 
451  if (map_frame_to_surface(qsv_frame->frame,
452  &qsv_frame->surface) < 0) {
453  av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
454  return NULL;
455  }
456  }
457 
458  qsv_frame->surface.Info = s->frame_infos[FF_INLINK_IDX(inlink)];
459  qsv_frame->surface.Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
460  inlink->time_base, default_tb);
461 
462  qsv_frame->surface.Info.PicStruct =
463  !(qsv_frame->frame->flags & AV_FRAME_FLAG_INTERLACED) ? MFX_PICSTRUCT_PROGRESSIVE :
464  ((qsv_frame->frame->flags & AV_FRAME_FLAG_TOP_FIELD_FIRST) ? MFX_PICSTRUCT_FIELD_TFF :
465  MFX_PICSTRUCT_FIELD_BFF);
466  if (qsv_frame->frame->repeat_pict == 1)
467  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
468  else if (qsv_frame->frame->repeat_pict == 2)
469  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
470  else if (qsv_frame->frame->repeat_pict == 4)
471  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
472 
473  return qsv_frame;
474 }
475 
476 /* get the output surface */
477 static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink, const AVFrame *in)
478 {
479  AVFilterContext *ctx = outlink->src;
480  QSVFrame *out_frame;
481  int ret;
482 
483  clear_unused_frames(s->out_frame_list);
484 
485  out_frame = get_free_frame(&s->out_frame_list);
486  if (!out_frame)
487  return NULL;
488 
489  /* For video memory, get a hw frame;
490  * For system memory, get a sw frame and map it into a mfx_surface. */
491  if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
492  out_frame->frame = av_frame_alloc();
493  if (!out_frame->frame)
494  return NULL;
495 
496  ret = av_frame_copy_props(out_frame->frame, in);
497  if (ret < 0) {
498  av_log(ctx, AV_LOG_ERROR, "Failed to copy metadata fields from src to dst.\n");
499  return NULL;
500  }
501 
502  ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
503  if (ret < 0) {
504  av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
505  return NULL;
506  }
507 
508  out_frame->surface = *(mfxFrameSurface1 *)out_frame->frame->data[3];
509  } else {
510  /* Get a frame with aligned dimensions.
511  * Libmfx need system memory being 128x64 aligned */
512  out_frame->frame = ff_get_video_buffer(outlink,
513  FFALIGN(outlink->w, 128),
514  FFALIGN(outlink->h, 64));
515  if (!out_frame->frame)
516  return NULL;
517 
518  ret = av_frame_copy_props(out_frame->frame, in);
519  if (ret < 0) {
520  av_log(ctx, AV_LOG_ERROR, "Failed to copy metadata fields from src to dst.\n");
521  return NULL;
522  }
523 
524  ret = map_frame_to_surface(out_frame->frame,
525  &out_frame->surface);
526  if (ret < 0)
527  return NULL;
528  }
529 
530  if (outlink->frame_rate.num && outlink->frame_rate.den)
531  out_frame->frame->duration = av_rescale_q(1, av_inv_q(outlink->frame_rate), outlink->time_base);
532  else
533  out_frame->frame->duration = 0;
534 
535  out_frame->frame->width = outlink->w;
536  out_frame->frame->height = outlink->h;
537  out_frame->surface.Info = s->vpp_param.vpp.Out;
538 
539  for (int i = 0; i < s->vpp_param.NumExtParam; i++) {
540  mfxExtBuffer *extbuf = s->vpp_param.ExtParam[i];
541 
542  if (extbuf->BufferId == MFX_EXTBUFF_VPP_DEINTERLACING) {
543 #if FF_API_INTERLACED_FRAME
545  out_frame->frame->interlaced_frame = 0;
547 #endif
548  out_frame->frame->flags &= ~AV_FRAME_FLAG_INTERLACED;
549  break;
550  }
551  }
552 
553  out_frame->surface.Info.PicStruct =
554  !(out_frame->frame->flags & AV_FRAME_FLAG_INTERLACED) ? MFX_PICSTRUCT_PROGRESSIVE :
555  ((out_frame->frame->flags & AV_FRAME_FLAG_TOP_FIELD_FIRST) ? MFX_PICSTRUCT_FIELD_TFF :
556  MFX_PICSTRUCT_FIELD_BFF);
557 
558  return out_frame;
559 }
560 
561 /* create the QSV session */
563 {
564  AVFilterLink *inlink = avctx->inputs[0];
565  AVFilterLink *outlink = avctx->outputs[0];
566  AVQSVFramesContext *in_frames_hwctx = NULL;
567  AVQSVFramesContext *out_frames_hwctx = NULL;
568 
569  AVBufferRef *device_ref;
570  AVHWDeviceContext *device_ctx;
571  AVQSVDeviceContext *device_hwctx;
572  mfxHDL handle;
573  mfxHandleType handle_type;
574  mfxVersion ver;
575  mfxIMPL impl;
576  int ret, i;
577 
578  if (inlink->hw_frames_ctx) {
579  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;
580 
581  device_ref = frames_ctx->device_ref;
582  in_frames_hwctx = frames_ctx->hwctx;
583 
584  s->in_mem_mode = in_frames_hwctx->frame_type;
585 
586  s->surface_ptrs_in = av_calloc(in_frames_hwctx->nb_surfaces,
587  sizeof(*s->surface_ptrs_in));
588  if (!s->surface_ptrs_in)
589  return AVERROR(ENOMEM);
590 
591  for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
592  s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;
593 
594  s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
595  } else if (avctx->hw_device_ctx) {
596  device_ref = avctx->hw_device_ctx;
597  s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
598  } else {
599  av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
600  return AVERROR(EINVAL);
601  }
602 
603  device_ctx = (AVHWDeviceContext *)device_ref->data;
604  device_hwctx = device_ctx->hwctx;
605 
606  if (outlink->format == AV_PIX_FMT_QSV) {
607  AVHWFramesContext *out_frames_ctx;
608  AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
609  if (!out_frames_ref)
610  return AVERROR(ENOMEM);
611 
612 #if QSV_HAVE_OPAQUE
613  s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
614  MFX_MEMTYPE_OPAQUE_FRAME :
615  MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_FROM_VPPOUT;
616 #else
617  s->out_mem_mode = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_FROM_VPPOUT;
618 #endif
619 
620  out_frames_ctx = (AVHWFramesContext *)out_frames_ref->data;
621  out_frames_hwctx = out_frames_ctx->hwctx;
622 
623  out_frames_ctx->format = AV_PIX_FMT_QSV;
624  out_frames_ctx->width = FFALIGN(outlink->w, 32);
625  out_frames_ctx->height = FFALIGN(outlink->h, 32);
626  out_frames_ctx->sw_format = s->out_sw_format;
627  out_frames_ctx->initial_pool_size = 64;
628  if (avctx->extra_hw_frames > 0)
629  out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
630  out_frames_hwctx->frame_type = s->out_mem_mode;
631 
632  ret = av_hwframe_ctx_init(out_frames_ref);
633  if (ret < 0) {
634  av_buffer_unref(&out_frames_ref);
635  av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
636  return ret;
637  }
638 
639  s->surface_ptrs_out = av_calloc(out_frames_hwctx->nb_surfaces,
640  sizeof(*s->surface_ptrs_out));
641  if (!s->surface_ptrs_out) {
642  av_buffer_unref(&out_frames_ref);
643  return AVERROR(ENOMEM);
644  }
645 
646  for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
647  s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
648  s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;
649 
650  av_buffer_unref(&outlink->hw_frames_ctx);
651  outlink->hw_frames_ctx = out_frames_ref;
652  } else
653  s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
654 
655  /* extract the properties of the "master" session given to us */
656  ret = MFXQueryIMPL(device_hwctx->session, &impl);
657  if (ret == MFX_ERR_NONE)
658  ret = MFXQueryVersion(device_hwctx->session, &ver);
659  if (ret != MFX_ERR_NONE) {
660  av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
661  return AVERROR_UNKNOWN;
662  }
663 
664  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
665  handle_type = MFX_HANDLE_VA_DISPLAY;
666  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
667  handle_type = MFX_HANDLE_D3D11_DEVICE;
668  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
669  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
670  } else {
671  av_log(avctx, AV_LOG_ERROR, "Error unsupported handle type\n");
672  return AVERROR_UNKNOWN;
673  }
674 
675  ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_type, &handle);
676  if (ret < 0)
677  return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
678  else if (ret > 0) {
679  ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
680  return AVERROR_UNKNOWN;
681  }
682 
683  /* create a "slave" session with those same properties, to be used for vpp */
684  ret = ff_qsvvpp_create_mfx_session(avctx, device_hwctx->loader, impl, &ver,
685  &s->session);
686  if (ret)
687  return ret;
688 
689  ret = MFXQueryVersion(s->session, &s->ver);
690  if (ret != MFX_ERR_NONE) {
691  av_log(avctx, AV_LOG_ERROR, "Error querying the runtime version\n");
692  return AVERROR_UNKNOWN;
693  }
694 
695  if (handle) {
696  ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
697  if (ret != MFX_ERR_NONE)
698  return AVERROR_UNKNOWN;
699  }
700 
701  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
702  ret = MFXJoinSession(device_hwctx->session, s->session);
703  if (ret != MFX_ERR_NONE)
704  return AVERROR_UNKNOWN;
705  }
706 
707 #if QSV_HAVE_OPAQUE
708  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
709  s->opaque_alloc.In.Surfaces = s->surface_ptrs_in;
710  s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
711  s->opaque_alloc.In.Type = s->in_mem_mode;
712 
713  s->opaque_alloc.Out.Surfaces = s->surface_ptrs_out;
714  s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
715  s->opaque_alloc.Out.Type = s->out_mem_mode;
716 
717  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
718  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
719  } else
720 #endif
721  if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
722  mfxFrameAllocator frame_allocator = {
723  .pthis = s,
724  .Alloc = frame_alloc,
725  .Lock = frame_lock,
726  .Unlock = frame_unlock,
727  .GetHDL = frame_get_hdl,
728  .Free = frame_free,
729  };
730 
731  ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
732  if (ret != MFX_ERR_NONE)
733  return AVERROR_UNKNOWN;
734  }
735 
736  return 0;
737 }
738 
740 {
741  return 0;
742 }
743 
745 {
746  int i;
747  int ret;
748  QSVVPPContext *s = avctx->priv;
749 
750  s->filter_frame = param->filter_frame;
751  if (!s->filter_frame)
752  s->filter_frame = ff_filter_frame;
753  s->out_sw_format = param->out_sw_format;
754 
755  s->set_frame_ext_params = param->set_frame_ext_params;
756  if (!s->set_frame_ext_params)
757  s->set_frame_ext_params = set_frame_ext_params_null;
758 
759  /* create the vpp session */
760  ret = init_vpp_session(avctx, s);
761  if (ret < 0)
762  goto failed;
763 
764  s->frame_infos = av_calloc(avctx->nb_inputs, sizeof(*s->frame_infos));
765  if (!s->frame_infos) {
766  ret = AVERROR(ENOMEM);
767  goto failed;
768  }
769 
770  /* Init each input's information */
771  for (i = 0; i < avctx->nb_inputs; i++) {
772  ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
773  if (ret < 0)
774  goto failed;
775  }
776 
777  /* Update input's frame info according to crop */
778  for (i = 0; i < param->num_crop; i++) {
779  QSVVPPCrop *crop = param->crop + i;
780  if (crop->in_idx > avctx->nb_inputs) {
781  ret = AVERROR(EINVAL);
782  goto failed;
783  }
784  s->frame_infos[crop->in_idx].CropX = crop->x;
785  s->frame_infos[crop->in_idx].CropY = crop->y;
786  s->frame_infos[crop->in_idx].CropW = crop->w;
787  s->frame_infos[crop->in_idx].CropH = crop->h;
788  }
789 
790  s->vpp_param.vpp.In = s->frame_infos[0];
791 
792  ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
793  if (ret < 0) {
794  av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
795  goto failed;
796  }
797 
798  s->nb_seq_buffers = param->num_ext_buf;
799 #if QSV_HAVE_OPAQUE
800  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode))
801  s->nb_seq_buffers++;
802 #endif
803 
804  if (s->nb_seq_buffers) {
805  s->seq_buffers = av_calloc(s->nb_seq_buffers, sizeof(*s->seq_buffers));
806  if (!s->seq_buffers) {
807  ret = AVERROR(ENOMEM);
808  goto failed;
809  }
810 
811  for (i = 0; i < param->num_ext_buf; i++)
812  s->seq_buffers[i] = param->ext_buf[i];
813 
814 #if QSV_HAVE_OPAQUE
815  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode))
816  s->seq_buffers[i] = (mfxExtBuffer *)&s->opaque_alloc;
817 #endif
818 
819  s->nb_ext_buffers = s->nb_seq_buffers;
820  s->ext_buffers = av_calloc(s->nb_ext_buffers, sizeof(*s->ext_buffers));
821  if (!s->ext_buffers) {
822  ret = AVERROR(ENOMEM);
823  goto failed;
824  }
825 
826  memcpy(s->ext_buffers, s->seq_buffers, s->nb_seq_buffers * sizeof(*s->seq_buffers));
827  }
828 
829  s->vpp_param.ExtParam = s->ext_buffers;
830  s->vpp_param.NumExtParam = s->nb_ext_buffers;
831 
832  s->got_frame = 0;
833 
834  /** keep fifo size at least 1. Even when async_depth is 0, fifo is used. */
835  s->async_fifo = av_fifo_alloc2(s->async_depth + 1, sizeof(QSVAsyncFrame), 0);
836  if (!s->async_fifo) {
837  ret = AVERROR(ENOMEM);
838  goto failed;
839  }
840 
841  s->vpp_param.AsyncDepth = s->async_depth;
842 
843  if (IS_SYSTEM_MEMORY(s->in_mem_mode))
844  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
845  else if (IS_VIDEO_MEMORY(s->in_mem_mode))
846  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
847 #if QSV_HAVE_OPAQUE
848  else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
849  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;
850 #endif
851 
852  if (IS_SYSTEM_MEMORY(s->out_mem_mode))
853  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
854  else if (IS_VIDEO_MEMORY(s->out_mem_mode))
855  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
856 #if QSV_HAVE_OPAQUE
857  else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
858  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
859 #endif
860 
861  /* Print input memory mode */
862  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
863  /* Print output memory mode */
864  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
865 
866  /* Validate VPP params, but don't initial VPP session here */
867  ret = MFXVideoVPP_Query(s->session, &s->vpp_param, &s->vpp_param);
868  if (ret < 0) {
869  ret = ff_qsvvpp_print_error(avctx, ret, "Error querying VPP params");
870  goto failed;
871  } else if (ret > 0)
872  ff_qsvvpp_print_warning(avctx, ret, "Warning When querying VPP params");
873 
874  return 0;
875 
876 failed:
877  ff_qsvvpp_close(avctx);
878 
879  return ret;
880 }
881 
883 {
884  int ret;
885  mfxExtBuffer *ext_param[QSVVPP_MAX_FRAME_EXTBUFS];
886  QSVVPPFrameParam fp = { 0, ext_param };
887 
888  ret = s->set_frame_ext_params(avctx, in->frame, out->frame, &fp);
889  if (ret)
890  return ret;
891 
892  if (fp.num_ext_buf) {
893  av_freep(&s->ext_buffers);
894  s->nb_ext_buffers = s->nb_seq_buffers + fp.num_ext_buf;
895 
896  s->ext_buffers = av_calloc(s->nb_ext_buffers, sizeof(*s->ext_buffers));
897  if (!s->ext_buffers)
898  return AVERROR(ENOMEM);
899 
900  memcpy(&s->ext_buffers[0], s->seq_buffers, s->nb_seq_buffers * sizeof(*s->seq_buffers));
901  memcpy(&s->ext_buffers[s->nb_seq_buffers], fp.ext_buf, fp.num_ext_buf * sizeof(*fp.ext_buf));
902  s->vpp_param.ExtParam = s->ext_buffers;
903  s->vpp_param.NumExtParam = s->nb_ext_buffers;
904  }
905 
906  if (!s->vpp_initted) {
907  s->vpp_param.vpp.In.PicStruct = in->surface.Info.PicStruct;
908  s->vpp_param.vpp.Out.PicStruct = out->surface.Info.PicStruct;
909 
910  /* Query VPP params again, including params for frame */
911  ret = MFXVideoVPP_Query(s->session, &s->vpp_param, &s->vpp_param);
912  if (ret < 0)
913  return ff_qsvvpp_print_error(avctx, ret, "Error querying VPP params");
914  else if (ret > 0)
915  ff_qsvvpp_print_warning(avctx, ret, "Warning When querying VPP params");
916 
917  ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
918  if (ret < 0)
919  return ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
920  else if (ret > 0)
921  ff_qsvvpp_print_warning(avctx, ret, "Warning When creating qsvvpp");
922 
923  s->vpp_initted = 1;
924  } else if (fp.num_ext_buf) {
925  ret = MFXVideoVPP_Reset(s->session, &s->vpp_param);
926  if (ret < 0) {
927  ret = ff_qsvvpp_print_error(avctx, ret, "Failed to reset session for qsvvpp");
928  return ret;
929  } else if (ret > 0)
930  ff_qsvvpp_print_warning(avctx, ret, "Warning When resetting session for qsvvpp");
931  }
932 
933  return 0;
934 }
935 
937 {
938  QSVVPPContext *s = avctx->priv;
939 
940  if (s->session) {
941  MFXVideoVPP_Close(s->session);
942  MFXClose(s->session);
943  s->session = NULL;
944  s->vpp_initted = 0;
945  }
946 
947  /* release all the resources */
948  clear_frame_list(&s->in_frame_list);
949  clear_frame_list(&s->out_frame_list);
950  av_freep(&s->surface_ptrs_in);
951  av_freep(&s->surface_ptrs_out);
952  av_freep(&s->seq_buffers);
953  av_freep(&s->ext_buffers);
954  av_freep(&s->frame_infos);
955  av_fifo_freep2(&s->async_fifo);
956 
957  return 0;
958 }
959 
/* ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink,
 * AVFrame *picref) per the symbol index (qsvvpp.c:960); the signature line is
 * missing from this extraction. Submits one input frame to the VPP session,
 * queues the async results, and forwards completed frames downstream via
 * s->filter_frame(). A NULL picref acts as a flush call. */
961 {
962  AVFilterContext *ctx = inlink->dst;
963  AVFilterLink *outlink = ctx->outputs[0];
964  QSVAsyncFrame aframe;
965  mfxSyncPoint sync;
966  QSVFrame *in_frame, *out_frame;
967  int ret, ret1, filter_ret;
968 
/* On EOF, drain every operation still queued in the async FIFO: sync each
 * one, then push the finished frame downstream. */
969  while (s->eof && av_fifo_read(s->async_fifo, &aframe, 1) >= 0) {
970  if (MFXVideoCORE_SyncOperation(s->session, aframe.sync, 1000) < 0)
971  av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");
972 
973  filter_ret = s->filter_frame(outlink, aframe.frame->frame);
974  if (filter_ret < 0) {
975  av_frame_free(&aframe.frame->frame);
976  return filter_ret;
977  }
978  aframe.frame->queued--;
979  s->got_frame = 1;
980  aframe.frame->frame = NULL;
981  }; /* NOTE(review): stray ';' after the while block — harmless, could be dropped */
982 
/* NULL picref means this was a flush-only invocation. */
983  if (!picref)
984  return 0;
985 
986  in_frame = submit_frame(s, inlink, picref);
987  if (!in_frame) {
988  av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
/* NOTE(review): the av_log argument line (qsvvpp.c:989, FF_INLINK_IDX(inlink));
 * per the symbol index) is missing from this extraction. */
990  return AVERROR(ENOMEM);
991  }
992 
/* One input may yield several outputs (the runtime signals this with
 * MFX_ERR_MORE_SURFACE), so keep querying output frames until done. */
993  do {
994  out_frame = query_frame(s, outlink, in_frame->frame);
995  if (!out_frame) {
996  av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
997  return AVERROR(ENOMEM);
998  }
999 
1000  ret = qsvvpp_init_vpp_session(ctx, s, in_frame, out_frame);
1001  if (ret)
1002  return ret;
1003 
/* Retry while the device is busy, sleeping 500us between attempts. */
1004  do {
1005  ret = MFXVideoVPP_RunFrameVPPAsync(s->session, &in_frame->surface,
1006  &out_frame->surface, NULL, &sync);
1007  if (ret == MFX_WRN_DEVICE_BUSY)
1008  av_usleep(500);
1009  } while (ret == MFX_WRN_DEVICE_BUSY);
1010 
1011  if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
1012  /* Ignore more_data error */
1013  if (ret == MFX_ERR_MORE_DATA)
1014  return AVERROR(EAGAIN);
1015  break;
1016  }
/* Rescale the 90kHz MFX timestamp (default_tb) to the output time base. */
1017  out_frame->frame->pts = av_rescale_q(out_frame->surface.Data.TimeStamp,
1018  default_tb, outlink->time_base);
1019 
1020  out_frame->queued++;
1021  aframe = (QSVAsyncFrame){ sync, out_frame };
1022  av_fifo_write(s->async_fifo, &aframe, 1);
1023 
/* Once more than async_depth operations are in flight, retire the oldest:
 * wait for its sync point, then send its frame downstream. */
1024  if (av_fifo_can_read(s->async_fifo) > s->async_depth) {
1025  av_fifo_read(s->async_fifo, &aframe, 1);
1026 
1027  do {
1028  ret1 = MFXVideoCORE_SyncOperation(s->session, aframe.sync, 1000);
1029  } while (ret1 == MFX_WRN_IN_EXECUTION);
1030 
1031  if (ret1 < 0) {
1032  ret = ret1;
1033  break;
1034  }
1035 
1036  filter_ret = s->filter_frame(outlink, aframe.frame->frame);
1037  if (filter_ret < 0) {
1038  av_frame_free(&aframe.frame->frame);
1039  return filter_ret;
1040  }
1041 
1042  aframe.frame->queued--;
1043  s->got_frame = 1;
1044  aframe.frame->frame = NULL;
1045  }
1046  } while(ret == MFX_ERR_MORE_SURFACE);
1047 
1048  if (ret < 0)
1049  return ff_qsvvpp_print_error(ctx, ret, "Error running VPP");
1050  else if (ret > 0)
1051  ff_qsvvpp_print_warning(ctx, ret, "Warning in running VPP");
1052 
1053  return 0;
1054 }
1055 
1056 #if QSV_ONEVPL
1057 
/* ff_qsvvpp_create_mfx_session(), oneVPL variant: create an MFX session from
 * a caller-supplied mfxLoader by enumerating the available implementations
 * and taking the first one a session can be created on.
 * NOTE(review): the signature's first line (with the function name and the
 * 'void *ctx' parameter — see the symbol index) is missing from this
 * extraction. */
1059  void *loader,
1060  mfxIMPL implementation,
1061  mfxVersion *pver,
1062  mfxSession *psession)
1063 {
1064  mfxStatus sts;
1065  mfxSession session = NULL;
1066  uint32_t impl_idx = 0;
1067 
/* NOTE(review): the first line of this av_log call (qsvvpp.c:1068) is missing
 * from this extraction. */
1069  "Use Intel(R) oneVPL to create MFX session with the specified MFX loader\n");
1070 
1071  if (!loader) {
1072  av_log(ctx, AV_LOG_ERROR, "Invalid MFX Loader handle\n");
1073  return AVERROR(EINVAL);
1074  }
1075 
1076  while (1) {
1077  /* Enumerate all implementations */
1078  mfxImplDescription *impl_desc;
1079 
1080  sts = MFXEnumImplementations(loader, impl_idx,
1081  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
1082  (mfxHDL *)&impl_desc);
1083  /* Failed to find an available implementation */
1084  if (sts == MFX_ERR_NOT_FOUND)
1085  break;
1086  else if (sts != MFX_ERR_NONE) {
1087  impl_idx++;
1088  continue;
1089  }
1090 
/* Try to create a session on this implementation; release the description
 * either way, then stop on success or probe the next index. */
1091  sts = MFXCreateSession(loader, impl_idx, &session);
1092  MFXDispReleaseImplDescription(loader, impl_desc);
1093  if (sts == MFX_ERR_NONE)
1094  break;
1095 
1096  impl_idx++;
1097  }
1098 
1099  if (sts < 0)
1100  return ff_qsvvpp_print_error(ctx, sts,
1101  "Error creating a MFX session");
1102  else if (sts > 0) {
/* NOTE(review): the ff_qsvvpp_print_warning call's first line (qsvvpp.c:1103)
 * is missing from this extraction. */
1104  "Warning in MFX session creation");
1105  return AVERROR_UNKNOWN;
1106  }
1107 
1108  *psession = session;
1109 
1110  return 0;
1111 }
1112 
1113 #else
1114 
/* ff_qsvvpp_create_mfx_session(), legacy Media SDK variant (definition at
 * qsvvpp.c:1115 per the symbol index): initialize an MFX session with
 * MFXInit(). The 'loader' parameter appears unused in the visible body.
 * NOTE(review): the signature's first line (with the function name and the
 * 'void *ctx' parameter) is missing from this extraction. */
1116  void *loader,
1117  mfxIMPL implementation,
1118  mfxVersion *pver,
1119  mfxSession *psession)
1120 {
1121  mfxSession session = NULL;
1122  mfxStatus sts;
1123 
/* NOTE(review): the first line of this av_log call (qsvvpp.c:1124) is missing
 * from this extraction. */
1125  "Use Intel(R) Media SDK to create MFX session, API version is "
1126  "%d.%d, the required implementation version is %d.%d\n",
1127  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1128 
1129  *psession = NULL;
1130  sts = MFXInit(implementation, pver, &session);
1131  if (sts < 0)
1132  return ff_qsvvpp_print_error(ctx, sts,
1133  "Error initializing an MFX session");
1134  else if (sts > 0) {
1135  ff_qsvvpp_print_warning(ctx, sts, "Warning in MFX session initialization");
1136  return AVERROR_UNKNOWN;
1137  }
1138 
1139  *psession = session;
1140 
1141  return 0;
1142 }
1143 
1144 #endif
1145 
/* ff_qsvvpp_get_video_buffer(AVFilterLink *inlink, int w, int h) per the
 * symbol index (qsvvpp.c:1146); the signature line is missing from this
 * extraction. Allocates a video buffer with 32-aligned width/height and a
 * 16-byte plane alignment via ff_default_get_video_buffer2(). */
1147 {
1148  /* When processing YUV420 frames, FFmpeg uses the same alignment on the
1149  * Y/U/V planes, while VPL and MSDK use the Y plane's pitch / 2 as the
1150  * U/V planes' pitch, which makes the U/V planes 16-byte aligned. We set a
1151  * separate alignment here to match the runtime's behaviour.
1152  */
/* NOTE(review): the call's first line (qsvvpp.c:1153,
 * 'return ff_default_get_video_buffer2(inlink,' per the index entry for
 * ff_default_get_video_buffer2) is missing from this extraction. */
1154  FFALIGN(inlink->w, 32),
1155  FFALIGN(inlink->h, 32),
1156  16);
1157 }
ff_default_get_video_buffer2
AVFrame * ff_default_get_video_buffer2(AVFilterLink *link, int w, int h, int align)
Definition: video.c:49
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:112
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: qsvvpp.c:225
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:93
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
QSVVPPCrop::in_idx
int in_idx
Input index.
Definition: qsvvpp.h:106
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:60
mfx_iopattern
int mfx_iopattern
Definition: qsvvpp.c:57
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
IS_OPAQUE_MEMORY
#define IS_OPAQUE_MEMORY(mode)
Definition: qsvvpp.c:42
out
FILE * out
Definition: movenc.c:54
init_vpp_session
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
Definition: qsvvpp.c:562
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:175
QSVVPPParam::crop
QSVVPPCrop * crop
Definition: qsvvpp.h:124
QSVVPPParam::out_sw_format
enum AVPixelFormat out_sw_format
Definition: qsvvpp.h:120
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1007
AVFrame::duration
int64_t duration
Duration of the frame, in the same units as pts.
Definition: frame.h:807
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2968
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:210
ff_qsvvpp_get_video_buffer
AVFrame * ff_qsvvpp_get_video_buffer(AVFilterLink *inlink, int w, int h)
Definition: qsvvpp.c:1146
query_frame
static QSVFrame * query_frame(QSVVPPContext *s, AVFilterLink *outlink, const AVFrame *in)
Definition: qsvvpp.c:477
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:100
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:338
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:340
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:452
AVFrame::width
int width
Definition: frame.h:412
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
w
uint8_t w
Definition: llviddspenc.c:38
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:252
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:469
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
mathematics.h
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:649
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:230
video.h
QSVFrame::frame
AVFrame * frame
Definition: qsv_internal.h:80
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:361
AV_FRAME_FLAG_TOP_FIELD_FIRST
#define AV_FRAME_FLAG_TOP_FIELD_FIRST
A flag to mark frames where the top field is displayed first if the content is interlaced.
Definition: frame.h:641
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:424
qsvvpp.h
av_fifo_write
int av_fifo_write(AVFifo *f, const void *buf, size_t nb_elems)
Write data into a FIFO.
Definition: fifo.c:188
qsv_errors
static const struct @256 qsv_errors[]
AVFilterContext::extra_hw_frames
int extra_hw_frames
Sets the number of extra hardware frames which the filter will allocate on its output links for use i...
Definition: avfilter.h:499
clear_unused_frames
static void clear_unused_frames(QSVFrame *list)
Definition: qsvvpp.c:352
AVRational::num
int num
Numerator.
Definition: rational.h:59
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:62
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:88
ff_qsvvpp_print_iopattern
int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern, const char *extra_string)
Definition: qsvvpp.c:72
MFX_IMPL_VIA_MASK
#define MFX_IMPL_VIA_MASK(impl)
Definition: qsvvpp.c:45
AVFrame::interlaced_frame
attribute_deprecated int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:530
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_fifo_read
int av_fifo_read(AVFifo *f, void *buf, size_t nb_elems)
Read data from a FIFO.
Definition: fifo.c:240
AVHWFramesContext::height
int height
Definition: hwcontext.h:230
QSVVPPCrop::w
int w
Definition: qsvvpp.h:107
s
#define s(width, name)
Definition: cbs_vp9.c:198
format
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
QSV_RUNTIME_VERSION_ATLEAST
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
Definition: qsv_internal.h:63
QSVVPP_MAX_FRAME_EXTBUFS
#define QSVVPP_MAX_FRAME_EXTBUFS
Definition: qsvvpp.h:55
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:609
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
default_tb
static const AVRational default_tb
Definition: qsvvpp.c:49
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
frame
static AVFrame * frame
Definition: demux_decode.c:54
if
if(ret)
Definition: filter_design.txt:179
fill_frameinfo_by_link
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
Definition: qsvvpp.c:297
QSVFrame
Definition: qsv_internal.h:79
QSVVPPContext
Definition: qsvvpp.h:63
ff_qsvvpp_close
int ff_qsvvpp_close(AVFilterContext *avctx)
Definition: qsvvpp.c:936
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:54
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:223
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:736
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
QSVVPPParam::num_crop
int num_crop
Definition: qsvvpp.h:123
QSVVPPParam
Definition: qsvvpp.h:110
QSVVPPCrop::x
int x
Definition: qsvvpp.h:107
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
QSV_HAVE_OPAQUE
#define QSV_HAVE_OPAQUE
Definition: qsv_internal.h:68
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:142
pix_fmt_to_mfx_fourcc
static int pix_fmt_to_mfx_fourcc(int format)
Definition: qsvvpp.c:237
ff_qsvvpp_print_error
int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err, const char *error_string)
Definition: qsvvpp.c:154
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:417
av_fifo_can_read
size_t av_fifo_can_read(const AVFifo *f)
Definition: fifo.c:87
list
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
Definition: filter_design.txt:25
QSVFrame::surface
mfxFrameSurface1 surface
Definition: qsv_internal.h:81
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:247
fp
#define fp
Definition: regdef.h:44
AVFilterContext::nb_inputs
unsigned nb_inputs
number of input pads
Definition: avfilter.h:418
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:899
IS_VIDEO_MEMORY
#define IS_VIDEO_MEMORY(mode)
Definition: qsvvpp.c:39
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:220
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:427
mfxerr
mfxStatus mfxerr
Definition: qsvvpp.c:90
set_frame_ext_params_null
static int set_frame_ext_params_null(AVFilterContext *ctx, const AVFrame *in, AVFrame *out, QSVVPPFrameParam *fp)
Definition: qsvvpp.c:739
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:455
AVQSVDeviceContext::loader
void * loader
The mfxLoader handle used for mfxSession creation.
Definition: hwcontext_qsv.h:47
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:55
internal.h
QSVAsyncFrame::frame
QSVFrame * frame
Definition: qsvdec.c:72
QSVAsyncFrame::sync
mfxSyncPoint sync
Definition: qsvvpp.c:52
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:245
common.h
desc
const char * desc
Definition: qsvvpp.c:58
QSVVPPParam::num_ext_buf
int num_ext_buf
Definition: qsvvpp.h:116
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:215
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:254
av_inv_q
static av_always_inline AVRational av_inv_q(AVRational q)
Invert a rational.
Definition: rational.h:159
QSVVPPParam::filter_frame
int(* filter_frame)(AVFilterLink *outlink, AVFrame *frame)
Definition: qsvvpp.h:112
ff_qsvvpp_init
int ff_qsvvpp_init(AVFilterContext *avctx, QSVVPPParam *param)
Definition: qsvvpp.c:744
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:636
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:262
map_frame_to_surface
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
Definition: qsvvpp.c:259
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:125
ret
ret
Definition: filter_design.txt:187
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
QSVFrame::queued
int queued
Definition: qsv_internal.h:99
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:163
av_fifo_alloc2
AVFifo * av_fifo_alloc2(size_t nb_elems, size_t elem_size, unsigned int flags)
Allocate and initialize an AVFifo with a given element size.
Definition: fifo.c:47
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:447
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:88
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:752
QSVVPPCrop::h
int h
Crop rectangle.
Definition: qsvvpp.h:107
QSVVPPCrop::y
int y
Definition: qsvvpp.h:107
AVFrame::height
int height
Definition: frame.h:412
ff_qsvvpp_filter_frame
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:960
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AVRational::den
int den
Denominator.
Definition: rational.h:60
averr
int averr
Definition: qsvvpp.c:91
FF_INLINK_IDX
#define FF_INLINK_IDX(link)
Find the index of a link.
Definition: internal.h:323
qsv_map_error
static int qsv_map_error(mfxStatus mfx_err, const char **desc)
Definition: qsvvpp.c:139
clear_frame_list
static void clear_frame_list(QSVFrame **list)
Definition: qsvvpp.c:365
AVFilterContext
An instance of a filter.
Definition: avfilter.h:409
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:532
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:53
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:200
qsvvpp_init_vpp_session
static int qsvvpp_init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s, const QSVFrame *in, QSVFrame *out)
Definition: qsvvpp.c:882
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
QSVAsyncFrame::sync
mfxSyncPoint * sync
Definition: qsvdec.c:71
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:52
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:385
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_fifo_freep2
void av_fifo_freep2(AVFifo **f)
Free an AVFifo and reset pointer to NULL.
Definition: fifo.c:286
h
h
Definition: vp9dsp_template.c:2038
IS_SYSTEM_MEMORY
#define IS_SYSTEM_MEMORY(mode)
Definition: qsvvpp.c:44
QSVVPPCrop
Definition: qsvvpp.h:105
get_free_frame
static QSVFrame * get_free_frame(QSVFrame **list)
Definition: qsvvpp.c:377
QSVAsyncFrame
Definition: qsvdec.c:70
ff_qsvvpp_create_mfx_session
int ff_qsvvpp_create_mfx_session(void *ctx, void *loader, mfxIMPL implementation, mfxVersion *pver, mfxSession *psession)
Definition: qsvvpp.c:1115
qsv_iopatterns
static const struct @255 qsv_iopatterns[]
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:511
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:209
QSVVPPParam::set_frame_ext_params
int(* set_frame_ext_params)(AVFilterContext *ctx, const AVFrame *in, AVFrame *out, QSVVPPFrameParam *fp)
callback
Definition: qsvvpp.h:113
ff_qsvvpp_print_warning
int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err, const char *warning_string)
Definition: qsvvpp.c:164
QSVVPPFrameParam
Definition: qsvvpp.h:57
AVFrame::repeat_pict
int repeat_pict
Number of fields in this frame which should be repeated, i.e.
Definition: frame.h:521
QSVVPPParam::ext_buf
mfxExtBuffer ** ext_buf
Definition: qsvvpp.h:117
submit_frame
static QSVFrame * submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:403
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:421