/* FFmpeg — libavfilter/qsvvpp.c */
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * Intel Quick Sync Video VPP base function
22  */
23 
#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/time.h"
#include "libavutil/pixdesc.h"

#include "internal.h"
#include "qsvvpp.h"
#include "video.h"
34 
/* Classify an mfx memory-type bitmask (MFX_MEMTYPE_*) by storage class. */
#define IS_VIDEO_MEMORY(mode) (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                       MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
39 
/* Device handle types probed in order when importing the master session's
 * device handle: VA-API display, D3D9 device manager, D3D11 device. */
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

/* libmfx surface timestamps use a fixed 90 kHz timebase */
static const AVRational default_tb = { 1, 90000 };
47 
48 static const struct {
50  const char *desc;
51 } qsv_iopatterns[] = {
52  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
53  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
54  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
55  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
56  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
57  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
58 };
59 
61  const char *extra_string)
62 {
63  const char *desc = NULL;
64 
65  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
67  desc = qsv_iopatterns[i].desc;
68  }
69  }
70  if (!desc)
71  desc = "unknown iopattern";
72 
73  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
74  return 0;
75 }
76 
/* mfxStatus -> (AVERROR, description) mapping used by qsv_map_error().
 * Entries with averr == 0 are warnings, not errors. */
static const struct {
    mfxStatus mfxerr;
    int averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                              },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"                        },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"                         },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"                          },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory"            },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer"     },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"                       },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block"      },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"                      },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found"       },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input"            },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output"        },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output"      },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"                    },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"                          },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters"        },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters"             },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"                   },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"                        },
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters"        },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters"             },

    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"               },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"                          },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed"             },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"                 },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters"        },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"                   },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"                   },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"                       },
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters"        },
};
116 
117 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
118 {
119  int i;
120  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
121  if (qsv_errors[i].mfxerr == mfx_err) {
122  if (desc)
123  *desc = qsv_errors[i].desc;
124  return qsv_errors[i].averr;
125  }
126  }
127  if (desc)
128  *desc = "unknown error";
129  return AVERROR_UNKNOWN;
130 }
131 
132 int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
133  const char *error_string)
134 {
135  const char *desc;
136  int ret;
137  ret = qsv_map_error(err, &desc);
138  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
139  return ret;
140 }
141 
142 int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
143  const char *warning_string)
144 {
145  const char *desc;
146  int ret;
147  ret = qsv_map_error(err, &desc);
148  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
149  return ret;
150 }
151 
152 /* functions for frameAlloc */
153 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
154  mfxFrameAllocResponse *resp)
155 {
156  QSVVPPContext *s = pthis;
157  int i;
158 
159  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
160  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
161  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
162  return MFX_ERR_UNSUPPORTED;
163 
164  if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
165  resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
166  if (!resp->mids)
167  return AVERROR(ENOMEM);
168 
169  for (i = 0; i < s->nb_surface_ptrs_in; i++)
170  resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;
171 
172  resp->NumFrameActual = s->nb_surface_ptrs_in;
173  } else {
174  resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
175  if (!resp->mids)
176  return AVERROR(ENOMEM);
177 
178  for (i = 0; i < s->nb_surface_ptrs_out; i++)
179  resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;
180 
181  resp->NumFrameActual = s->nb_surface_ptrs_out;
182  }
183 
184  return MFX_ERR_NONE;
185 }
186 
/* libmfx allocator callback: free only the MemId array built by frame_alloc();
 * the surfaces themselves are owned by the hw frames context. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}
192 
/* libmfx allocator callback: CPU mapping of video-memory surfaces is not
 * supported by this allocator. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
197 
/* libmfx allocator callback: counterpart of frame_lock(); likewise unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
202 
/* libmfx allocator callback: the MemId stored per surface already is the
 * underlying device handle, so return it unchanged. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
208 
210 {
211  switch (format) {
212  case AV_PIX_FMT_YUV420P:
213  return MFX_FOURCC_YV12;
214  case AV_PIX_FMT_NV12:
215  return MFX_FOURCC_NV12;
216  case AV_PIX_FMT_YUYV422:
217  return MFX_FOURCC_YUY2;
218  case AV_PIX_FMT_BGRA:
219  return MFX_FOURCC_RGB4;
220  }
221 
222  return MFX_FOURCC_NV12;
223 }
224 
225 static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
226 {
227  switch (frame->format) {
228  case AV_PIX_FMT_NV12:
229  case AV_PIX_FMT_P010:
230  surface->Data.Y = frame->data[0];
231  surface->Data.UV = frame->data[1];
232  break;
233  case AV_PIX_FMT_YUV420P:
234  surface->Data.Y = frame->data[0];
235  surface->Data.U = frame->data[1];
236  surface->Data.V = frame->data[2];
237  break;
238  case AV_PIX_FMT_YUYV422:
239  surface->Data.Y = frame->data[0];
240  surface->Data.U = frame->data[0] + 1;
241  surface->Data.V = frame->data[0] + 3;
242  break;
243  case AV_PIX_FMT_RGB32:
244  surface->Data.B = frame->data[0];
245  surface->Data.G = frame->data[0] + 1;
246  surface->Data.R = frame->data[0] + 2;
247  surface->Data.A = frame->data[0] + 3;
248  break;
249  default:
250  return MFX_ERR_UNSUPPORTED;
251  }
252  surface->Data.Pitch = frame->linesize[0];
253 
254  return 0;
255 }
256 
257 /* fill the surface info */
258 static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
259 {
260  enum AVPixelFormat pix_fmt;
261  AVHWFramesContext *frames_ctx;
262  AVQSVFramesContext *frames_hwctx;
263  const AVPixFmtDescriptor *desc;
264 
265  if (link->format == AV_PIX_FMT_QSV) {
266  if (!link->hw_frames_ctx)
267  return AVERROR(EINVAL);
268 
269  frames_ctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
270  frames_hwctx = frames_ctx->hwctx;
271  *frameinfo = frames_hwctx->surfaces[0].Info;
272  } else {
273  pix_fmt = link->format;
275  if (!desc)
276  return AVERROR_BUG;
277 
278  frameinfo->CropX = 0;
279  frameinfo->CropY = 0;
280  frameinfo->Width = FFALIGN(link->w, 32);
281  frameinfo->Height = FFALIGN(link->h, 32);
282  frameinfo->PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
283  frameinfo->FourCC = pix_fmt_to_mfx_fourcc(pix_fmt);
284  frameinfo->BitDepthLuma = desc->comp[0].depth;
285  frameinfo->BitDepthChroma = desc->comp[0].depth;
286  frameinfo->Shift = desc->comp[0].depth > 8;
287  if (desc->log2_chroma_w && desc->log2_chroma_h)
288  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
289  else if (desc->log2_chroma_w)
290  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
291  else
292  frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
293  }
294 
295  frameinfo->CropW = link->w;
296  frameinfo->CropH = link->h;
297  frameinfo->FrameRateExtN = link->frame_rate.num;
298  frameinfo->FrameRateExtD = link->frame_rate.den;
299  frameinfo->AspectRatioW = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
300  frameinfo->AspectRatioH = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;
301 
302  return 0;
303 }
304 
306 {
307  while (list) {
308  /* list->queued==1 means the frame is not cached in VPP
309  * process any more, it can be released to pool. */
310  if ((list->queued == 1) && !list->surface.Data.Locked) {
311  av_frame_free(&list->frame);
312  list->queued = 0;
313  }
314  list = list->next;
315  }
316 }
317 
319 {
320  while (*list) {
321  QSVFrame *frame;
322 
323  frame = *list;
324  *list = (*list)->next;
325  av_frame_free(&frame->frame);
326  av_freep(&frame);
327  }
328 }
329 
331 {
332  QSVFrame *out = *list;
333 
334  for (; out; out = out->next) {
335  if (!out->queued) {
336  out->queued = 1;
337  break;
338  }
339  }
340 
341  if (!out) {
342  out = av_mallocz(sizeof(*out));
343  if (!out) {
344  av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
345  return NULL;
346  }
347  out->queued = 1;
348  out->next = *list;
349  *list = out;
350  }
351 
352  return out;
353 }
354 
355 /* get the input surface */
357 {
358  QSVFrame *qsv_frame;
359  AVFilterContext *ctx = inlink->dst;
360 
361  clear_unused_frames(s->in_frame_list);
362 
363  qsv_frame = get_free_frame(&s->in_frame_list);
364  if (!qsv_frame)
365  return NULL;
366 
367  /* Turn AVFrame into mfxFrameSurface1.
368  * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
369  * mfxFrameSurface1 is stored in AVFrame->data[3];
370  * for system memory mode, raw video data is stored in
371  * AVFrame, we should map it into mfxFrameSurface1.
372  */
373  if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
374  if (picref->format != AV_PIX_FMT_QSV) {
375  av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
376  return NULL;
377  }
378  qsv_frame->frame = av_frame_clone(picref);
379  qsv_frame->surface = *(mfxFrameSurface1 *)qsv_frame->frame->data[3];
380  } else {
381  /* make a copy if the input is not padded as libmfx requires */
382  if (picref->height & 31 || picref->linesize[0] & 31) {
383  qsv_frame->frame = ff_get_video_buffer(inlink,
384  FFALIGN(inlink->w, 32),
385  FFALIGN(inlink->h, 32));
386  if (!qsv_frame->frame)
387  return NULL;
388 
389  qsv_frame->frame->width = picref->width;
390  qsv_frame->frame->height = picref->height;
391 
392  if (av_frame_copy(qsv_frame->frame, picref) < 0) {
393  av_frame_free(&qsv_frame->frame);
394  return NULL;
395  }
396 
397  av_frame_copy_props(qsv_frame->frame, picref);
398  } else
399  qsv_frame->frame = av_frame_clone(picref);
400 
401  if (map_frame_to_surface(qsv_frame->frame,
402  &qsv_frame->surface) < 0) {
403  av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
404  return NULL;
405  }
406  }
407 
408  qsv_frame->surface.Info = s->frame_infos[FF_INLINK_IDX(inlink)];
409  qsv_frame->surface.Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
410  inlink->time_base, default_tb);
411 
412  qsv_frame->surface.Info.PicStruct =
413  !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
414  (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
415  MFX_PICSTRUCT_FIELD_BFF);
416  if (qsv_frame->frame->repeat_pict == 1)
417  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
418  else if (qsv_frame->frame->repeat_pict == 2)
419  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
420  else if (qsv_frame->frame->repeat_pict == 4)
421  qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;
422 
423  return qsv_frame;
424 }
425 
426 /* get the output surface */
428 {
429  AVFilterContext *ctx = outlink->src;
430  QSVFrame *out_frame;
431  int ret;
432 
433  clear_unused_frames(s->out_frame_list);
434 
435  out_frame = get_free_frame(&s->out_frame_list);
436  if (!out_frame)
437  return NULL;
438 
439  /* For video memory, get a hw frame;
440  * For system memory, get a sw frame and map it into a mfx_surface. */
441  if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
442  out_frame->frame = av_frame_alloc();
443  if (!out_frame->frame)
444  return NULL;
445 
446  ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
447  if (ret < 0) {
448  av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
449  return NULL;
450  }
451 
452  out_frame->surface = *(mfxFrameSurface1 *)out_frame->frame->data[3];
453  } else {
454  /* Get a frame with aligned dimensions.
455  * Libmfx need system memory being 128x64 aligned */
456  out_frame->frame = ff_get_video_buffer(outlink,
457  FFALIGN(outlink->w, 128),
458  FFALIGN(outlink->h, 64));
459  if (!out_frame->frame)
460  return NULL;
461 
462  out_frame->frame->width = outlink->w;
463  out_frame->frame->height = outlink->h;
464 
465  ret = map_frame_to_surface(out_frame->frame,
466  &out_frame->surface);
467  if (ret < 0)
468  return NULL;
469  }
470 
471  out_frame->surface.Info = s->vpp_param.vpp.Out;
472 
473  return out_frame;
474 }
475 
476 /* create the QSV session */
478 {
479  AVFilterLink *inlink = avctx->inputs[0];
480  AVFilterLink *outlink = avctx->outputs[0];
481  AVQSVFramesContext *in_frames_hwctx = NULL;
482  AVQSVFramesContext *out_frames_hwctx = NULL;
483 
484  AVBufferRef *device_ref;
485  AVHWDeviceContext *device_ctx;
486  AVQSVDeviceContext *device_hwctx;
487  mfxHDL handle;
488  mfxHandleType handle_type;
489  mfxVersion ver;
490  mfxIMPL impl;
491  int ret, i;
492 
493  if (inlink->hw_frames_ctx) {
494  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;
495 
496  device_ref = frames_ctx->device_ref;
497  in_frames_hwctx = frames_ctx->hwctx;
498 
499  s->in_mem_mode = in_frames_hwctx->frame_type;
500 
501  s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
502  sizeof(*s->surface_ptrs_in));
503  if (!s->surface_ptrs_in)
504  return AVERROR(ENOMEM);
505 
506  for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
507  s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;
508 
509  s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
510  } else if (avctx->hw_device_ctx) {
511  device_ref = avctx->hw_device_ctx;
512  s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
513  } else {
514  av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
515  return AVERROR(EINVAL);
516  }
517 
518  device_ctx = (AVHWDeviceContext *)device_ref->data;
519  device_hwctx = device_ctx->hwctx;
520 
521  if (outlink->format == AV_PIX_FMT_QSV) {
522  AVHWFramesContext *out_frames_ctx;
523  AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
524  if (!out_frames_ref)
525  return AVERROR(ENOMEM);
526 
527  s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
528  MFX_MEMTYPE_OPAQUE_FRAME :
529  MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
530 
531  out_frames_ctx = (AVHWFramesContext *)out_frames_ref->data;
532  out_frames_hwctx = out_frames_ctx->hwctx;
533 
534  out_frames_ctx->format = AV_PIX_FMT_QSV;
535  out_frames_ctx->width = FFALIGN(outlink->w, 32);
536  out_frames_ctx->height = FFALIGN(outlink->h, 32);
537  out_frames_ctx->sw_format = s->out_sw_format;
538  out_frames_ctx->initial_pool_size = 64;
539  if (avctx->extra_hw_frames > 0)
540  out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
541  out_frames_hwctx->frame_type = s->out_mem_mode;
542 
543  ret = av_hwframe_ctx_init(out_frames_ref);
544  if (ret < 0) {
545  av_buffer_unref(&out_frames_ref);
546  av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
547  return ret;
548  }
549 
550  s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
551  sizeof(*s->surface_ptrs_out));
552  if (!s->surface_ptrs_out) {
553  av_buffer_unref(&out_frames_ref);
554  return AVERROR(ENOMEM);
555  }
556 
557  for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
558  s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
559  s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;
560 
561  av_buffer_unref(&outlink->hw_frames_ctx);
562  outlink->hw_frames_ctx = out_frames_ref;
563  } else
564  s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
565 
566  /* extract the properties of the "master" session given to us */
567  ret = MFXQueryIMPL(device_hwctx->session, &impl);
568  if (ret == MFX_ERR_NONE)
569  ret = MFXQueryVersion(device_hwctx->session, &ver);
570  if (ret != MFX_ERR_NONE) {
571  av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
572  return AVERROR_UNKNOWN;
573  }
574 
575  for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
576  ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
577  if (ret == MFX_ERR_NONE) {
579  break;
580  }
581  }
582 
583  if (ret < 0)
584  return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
585  else if (ret > 0) {
586  ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
587  return AVERROR_UNKNOWN;
588  }
589 
590  /* create a "slave" session with those same properties, to be used for vpp */
591  ret = MFXInit(impl, &ver, &s->session);
592  if (ret < 0)
593  return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
594  else if (ret > 0) {
595  ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
596  return AVERROR_UNKNOWN;
597  }
598 
599  if (handle) {
600  ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
601  if (ret != MFX_ERR_NONE)
602  return AVERROR_UNKNOWN;
603  }
604 
605  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
606  ret = MFXJoinSession(device_hwctx->session, s->session);
607  if (ret != MFX_ERR_NONE)
608  return AVERROR_UNKNOWN;
609  }
610 
611  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
612  s->opaque_alloc.In.Surfaces = s->surface_ptrs_in;
613  s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
614  s->opaque_alloc.In.Type = s->in_mem_mode;
615 
616  s->opaque_alloc.Out.Surfaces = s->surface_ptrs_out;
617  s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
618  s->opaque_alloc.Out.Type = s->out_mem_mode;
619 
620  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
621  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
622  } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
623  mfxFrameAllocator frame_allocator = {
624  .pthis = s,
625  .Alloc = frame_alloc,
626  .Lock = frame_lock,
627  .Unlock = frame_unlock,
628  .GetHDL = frame_get_hdl,
629  .Free = frame_free,
630  };
631 
632  ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
633  if (ret != MFX_ERR_NONE)
634  return AVERROR_UNKNOWN;
635  }
636 
637  return 0;
638 }
639 
/* Size of one async-fifo entry: each queued item is a QSVFrame* record
 * followed by an mfxSyncPoint record. */
static unsigned int qsv_fifo_item_size(void)
{
    return sizeof(mfxSyncPoint) + sizeof(QSVFrame*);
}
644 
/* Number of (frame, sync point) pairs currently queued in the fifo. */
static unsigned int qsv_fifo_size(const AVFifoBuffer* fifo)
{
    return av_fifo_size(fifo)/qsv_fifo_item_size();
}
649 
651 {
652  int i;
653  int ret;
654  QSVVPPContext *s;
655 
656  s = av_mallocz(sizeof(*s));
657  if (!s)
658  return AVERROR(ENOMEM);
659 
660  s->filter_frame = param->filter_frame;
661  if (!s->filter_frame)
662  s->filter_frame = ff_filter_frame;
663  s->out_sw_format = param->out_sw_format;
664 
665  /* create the vpp session */
666  ret = init_vpp_session(avctx, s);
667  if (ret < 0)
668  goto failed;
669 
670  s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
671  if (!s->frame_infos) {
672  ret = AVERROR(ENOMEM);
673  goto failed;
674  }
675 
676  /* Init each input's information */
677  for (i = 0; i < avctx->nb_inputs; i++) {
678  ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
679  if (ret < 0)
680  goto failed;
681  }
682 
683  /* Update input's frame info according to crop */
684  for (i = 0; i < param->num_crop; i++) {
685  QSVVPPCrop *crop = param->crop + i;
686  if (crop->in_idx > avctx->nb_inputs) {
687  ret = AVERROR(EINVAL);
688  goto failed;
689  }
690  s->frame_infos[crop->in_idx].CropX = crop->x;
691  s->frame_infos[crop->in_idx].CropY = crop->y;
692  s->frame_infos[crop->in_idx].CropW = crop->w;
693  s->frame_infos[crop->in_idx].CropH = crop->h;
694  }
695 
696  s->vpp_param.vpp.In = s->frame_infos[0];
697 
698  ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
699  if (ret < 0) {
700  av_log(avctx, AV_LOG_ERROR, "Fail to get frame info from link.\n");
701  goto failed;
702  }
703 
704  if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
705  s->nb_ext_buffers = param->num_ext_buf + 1;
706  s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
707  if (!s->ext_buffers) {
708  ret = AVERROR(ENOMEM);
709  goto failed;
710  }
711 
712  s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
713  for (i = 1; i < param->num_ext_buf; i++)
714  s->ext_buffers[i] = param->ext_buf[i - 1];
715  s->vpp_param.ExtParam = s->ext_buffers;
716  s->vpp_param.NumExtParam = s->nb_ext_buffers;
717  } else {
718  s->vpp_param.NumExtParam = param->num_ext_buf;
719  s->vpp_param.ExtParam = param->ext_buf;
720  }
721 
722  s->got_frame = 0;
723 
724  /** keep fifo size at least 1. Even when async_depth is 0, fifo is used. */
725  s->async_fifo = av_fifo_alloc((param->async_depth + 1) * qsv_fifo_item_size());
726  s->async_depth = param->async_depth;
727  if (!s->async_fifo) {
728  ret = AVERROR(ENOMEM);
729  goto failed;
730  }
731 
732  s->vpp_param.AsyncDepth = param->async_depth;
733 
734  if (IS_SYSTEM_MEMORY(s->in_mem_mode))
735  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
736  else if (IS_VIDEO_MEMORY(s->in_mem_mode))
737  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
738  else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
739  s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;
740 
741  if (IS_SYSTEM_MEMORY(s->out_mem_mode))
742  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
743  else if (IS_VIDEO_MEMORY(s->out_mem_mode))
744  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
745  else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
746  s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
747 
748  /* Print input memory mode */
749  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
750  /* Print output memory mode */
751  ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
752  ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
753  if (ret < 0) {
754  ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
755  goto failed;
756  } else if (ret > 0)
757  ff_qsvvpp_print_warning(avctx, ret, "Warning When creating qsvvpp");
758 
759  *vpp = s;
760  return 0;
761 
762 failed:
763  ff_qsvvpp_free(&s);
764 
765  return ret;
766 }
767 
769 {
770  QSVVPPContext *s = *vpp;
771 
772  if (!s)
773  return 0;
774 
775  if (s->session) {
776  MFXVideoVPP_Close(s->session);
777  MFXClose(s->session);
778  }
779 
780  /* release all the resources */
781  clear_frame_list(&s->in_frame_list);
782  clear_frame_list(&s->out_frame_list);
783  av_freep(&s->surface_ptrs_in);
784  av_freep(&s->surface_ptrs_out);
785  av_freep(&s->ext_buffers);
786  av_freep(&s->frame_infos);
787  av_fifo_free(s->async_fifo);
788  av_freep(vpp);
789 
790  return 0;
791 }
792 
794 {
795  AVFilterContext *ctx = inlink->dst;
796  AVFilterLink *outlink = ctx->outputs[0];
797  mfxSyncPoint sync;
798  QSVFrame *in_frame, *out_frame, *tmp;
799  int ret, filter_ret;
800 
801  while (s->eof && qsv_fifo_size(s->async_fifo)) {
802  av_fifo_generic_read(s->async_fifo, &tmp, sizeof(tmp), NULL);
803  av_fifo_generic_read(s->async_fifo, &sync, sizeof(sync), NULL);
804  if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
805  av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");
806 
807  filter_ret = s->filter_frame(outlink, tmp->frame);
808  if (filter_ret < 0) {
809  av_frame_free(&tmp->frame);
810  ret = filter_ret;
811  break;
812  }
813  tmp->queued--;
814  s->got_frame = 1;
815  tmp->frame = NULL;
816  };
817 
818  if (!picref)
819  return 0;
820 
821  in_frame = submit_frame(s, inlink, picref);
822  if (!in_frame) {
823  av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
825  return AVERROR(ENOMEM);
826  }
827 
828  do {
829  out_frame = query_frame(s, outlink);
830  if (!out_frame) {
831  av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
832  return AVERROR(ENOMEM);
833  }
834 
835  do {
836  ret = MFXVideoVPP_RunFrameVPPAsync(s->session, &in_frame->surface,
837  &out_frame->surface, NULL, &sync);
838  if (ret == MFX_WRN_DEVICE_BUSY)
839  av_usleep(500);
840  } while (ret == MFX_WRN_DEVICE_BUSY);
841 
842  if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
843  /* Ignore more_data error */
844  if (ret == MFX_ERR_MORE_DATA)
845  ret = AVERROR(EAGAIN);
846  break;
847  }
848  out_frame->frame->pts = av_rescale_q(out_frame->surface.Data.TimeStamp,
849  default_tb, outlink->time_base);
850 
851  out_frame->queued++;
852  av_fifo_generic_write(s->async_fifo, &out_frame, sizeof(out_frame), NULL);
853  av_fifo_generic_write(s->async_fifo, &sync, sizeof(sync), NULL);
854 
855 
856  if (qsv_fifo_size(s->async_fifo) > s->async_depth) {
857  av_fifo_generic_read(s->async_fifo, &tmp, sizeof(tmp), NULL);
858  av_fifo_generic_read(s->async_fifo, &sync, sizeof(sync), NULL);
859 
860  do {
861  ret = MFXVideoCORE_SyncOperation(s->session, sync, 1000);
862  } while (ret == MFX_WRN_IN_EXECUTION);
863 
864  filter_ret = s->filter_frame(outlink, tmp->frame);
865  if (filter_ret < 0) {
866  av_frame_free(&tmp->frame);
867  ret = filter_ret;
868  break;
869  }
870 
871  tmp->queued--;
872  s->got_frame = 1;
873  tmp->frame = NULL;
874  }
875  } while(ret == MFX_ERR_MORE_SURFACE);
876 
877  return ret;
878 }
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
frame_get_hdl
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
Definition: qsvvpp.c:203
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
QSVVPPCrop::in_idx
int in_idx
Input index.
Definition: qsvvpp.h:80
AVQSVFramesContext::frame_type
int frame_type
A combination of MFX_MEMTYPE_* describing the frame pool.
Definition: hwcontext_qsv.h:49
mfx_iopattern
int mfx_iopattern
Definition: qsvvpp.c:49
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
handle_types
static const mfxHandleType handle_types[]
Definition: qsvvpp.c:40
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
IS_OPAQUE_MEMORY
#define IS_OPAQUE_MEMORY(mode)
Definition: qsvvpp.c:37
av_fifo_generic_write
int av_fifo_generic_write(AVFifoBuffer *f, void *src, int size, int(*func)(void *, void *, int))
Feed data from a user-supplied callback to an AVFifoBuffer.
Definition: fifo.c:122
out
FILE * out
Definition: movenc.c:54
init_vpp_session
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
Definition: qsvvpp.c:477
frame_alloc
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:153
QSVVPPParam::crop
QSVVPPCrop * crop
Definition: qsvvpp.h:97
QSVVPPParam::out_sw_format
enum AVPixelFormat out_sw_format
Definition: qsvvpp.h:93
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1094
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2573
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:92
av_fifo_free
void av_fifo_free(AVFifoBuffer *f)
Free an AVFifoBuffer.
Definition: fifo.c:55
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:209
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:204
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:333
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:324
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:27
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:417
AVFrame::width
int width
Definition: frame.h:382
AVQSVDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_qsv.h:35
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:247
AVFrame::top_field_first
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:476
av_mallocz_array
void * av_mallocz_array(size_t nmemb, size_t size)
Definition: mem.c:190
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:210
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:397
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
mathematics.h
av_fifo_generic_read
int av_fifo_generic_read(AVFifoBuffer *f, void *dest, int buf_size, void(*func)(void *, void *, int))
Feed data from an AVFifoBuffer to a user-supplied callback.
Definition: fifo.c:213
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:71
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:229
video.h
QSVFrame::frame
AVFrame * frame
Definition: qsv_internal.h:73
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:338
AVFifoBuffer
Definition: fifo.h:31
qsvvpp.h
AVFilterContext::extra_hw_frames
int extra_hw_frames
Sets the number of extra hardware frames which the filter will allocate on its output links for use i...
Definition: avfilter.h:427
clear_unused_frames
static void clear_unused_frames(QSVFrame *list)
Definition: qsvvpp.c:305
AVRational::num
int num
Numerator.
Definition: rational.h:59
handle_type
mfxHandleType handle_type
Definition: hwcontext_qsv.c:89
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:191
ff_qsvvpp_print_iopattern
int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern, const char *extra_string)
Definition: qsvvpp.c:60
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:229
QSVVPPCrop::w
int w
Definition: qsvvpp.h:81
s
#define s(width, name)
Definition: cbs_vp9.c:257
ff_qsvvpp_create
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
Definition: qsvvpp.c:650
QSV_RUNTIME_VERSION_ATLEAST
#define QSV_RUNTIME_VERSION_ATLEAST(MFX_VERSION, MAJOR, MINOR)
Definition: qsv_internal.h:59
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:541
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:40
default_tb
static const AVRational default_tb
Definition: qsvvpp.c:46
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
QSVVPPParam::async_depth
int async_depth
Definition: qsvvpp.h:99
if
if(ret)
Definition: filter_design.txt:179
fill_frameinfo_by_link
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
Definition: qsvvpp.c:258
QSVFrame
Definition: qsv_internal.h:72
QSVVPPContext
Definition: qsvvpp.h:50
AVQSVFramesContext::surfaces
mfxFrameSurface1 * surfaces
Definition: hwcontext_qsv.h:43
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:659
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:125
QSVVPPParam::num_crop
int num_crop
Definition: qsvvpp.h:96
QSVVPPParam
Definition: qsvvpp.h:84
QSVVPPCrop::x
int x
Definition: qsvvpp.h:81
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:67
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
qsv_iopatterns
static const struct @211 qsv_iopatterns[]
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:141
pix_fmt_to_mfx_fourcc
static int pix_fmt_to_mfx_fourcc(int format)
Definition: qsvvpp.c:209
ff_qsvvpp_print_error
int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err, const char *error_string)
Definition: qsvvpp.c:132
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:349
list
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining list
Definition: filter_design.txt:25
QSVFrame::surface
mfxFrameSurface1 surface
Definition: qsv_internal.h:74
time.h
AV_PIX_FMT_QSV
@ AV_PIX_FMT_QSV
HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1 structure.
Definition: pixfmt.h:222
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
ff_qsvvpp_free
int ff_qsvvpp_free(QSVVPPContext **vpp)
Definition: qsvvpp.c:768
AVFilterContext::nb_inputs
unsigned nb_inputs
number of input pads
Definition: avfilter.h:350
av_frame_copy
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
Definition: frame.c:800
IS_VIDEO_MEMORY
#define IS_VIDEO_MEMORY(mode)
Definition: qsvvpp.c:35
frame_unlock
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:198
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:397
mfxerr
mfxStatus mfxerr
Definition: qsvvpp.c:78
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:372
AVQSVFramesContext::nb_surfaces
int nb_surfaces
Definition: hwcontext_qsv.h:44
internal.h
AVFrame::interlaced_frame
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:471
hwcontext_qsv.h
i
int i
Definition: input.c:407
common.h
desc
const char * desc
Definition: qsvvpp.c:50
QSVVPPParam::num_ext_buf
int num_ext_buf
Definition: qsvvpp.h:89
frame_lock
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
Definition: qsvvpp.c:193
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:237
QSVVPPParam::filter_frame
int(* filter_frame)(AVFilterLink *outlink, AVFrame *frame)
Definition: qsvvpp.h:86
qsv_fifo_item_size
static unsigned int qsv_fifo_item_size(void)
Definition: qsvvpp.c:640
map_frame_to_surface
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
Definition: qsvvpp.c:225
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
ret
ret
Definition: filter_design.txt:187
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
QSVFrame::queued
int queued
Definition: qsv_internal.h:79
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:162
AVFrame::sample_aspect_ratio
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:412
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:663
QSVVPPCrop::h
int h
Crop rectangle.
Definition: qsvvpp.h:81
QSVVPPCrop::y
int y
Definition: qsvvpp.h:81
AVFrame::height
int height
Definition: frame.h:382
ff_qsvvpp_filter_frame
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:793
AVQSVDeviceContext::session
mfxSession session
Definition: hwcontext_qsv.h:36
AVRational::den
int den
Denominator.
Definition: rational.h:60
averr
int averr
Definition: qsvvpp.c:79
FF_INLINK_IDX
#define FF_INLINK_IDX(link)
Find the index of a link.
Definition: internal.h:302
qsv_map_error
static int qsv_map_error(mfxStatus mfx_err, const char **desc)
Definition: qsvvpp.c:117
clear_frame_list
static void clear_frame_list(QSVFrame **list)
Definition: qsvvpp.c:318
AVFilterContext
An instance of a filter.
Definition: avfilter.h:341
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:448
AVQSVFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_qsv.h:42
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:199
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:84
av_fifo_size
int av_fifo_size(const AVFifoBuffer *f)
Return the amount of data in bytes in the AVFifoBuffer, that is the amount of data you can read from ...
Definition: fifo.c:77
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:48
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
format
fg outputs[0] format
Definition: ffmpeg_filter.c:177
av_fifo_alloc
AVFifoBuffer * av_fifo_alloc(unsigned int size)
Initialize an AVFifoBuffer.
Definition: fifo.c:43
hwcontext.h
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:50
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:355
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
IS_SYSTEM_MEMORY
#define IS_SYSTEM_MEMORY(mode)
Definition: qsvvpp.c:38
QSVVPPCrop
Definition: qsvvpp.h:79
get_free_frame
static QSVFrame * get_free_frame(QSVFrame **list)
Definition: qsvvpp.c:330
qsv_errors
static const struct @212 qsv_errors[]
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:502
frame_free
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
Definition: qsvvpp.c:187
qsv_fifo_size
static unsigned int qsv_fifo_size(const AVFifoBuffer *fifo)
Definition: qsvvpp.c:645
ff_qsvvpp_print_warning
int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err, const char *warning_string)
Definition: qsvvpp.c:142
AVFrame::repeat_pict
int repeat_pict
When decoding, this signals how much the picture must be delayed.
Definition: frame.h:466
QSVVPPParam::ext_buf
mfxExtBuffer ** ext_buf
Definition: qsvvpp.h:90
submit_frame
static QSVFrame * submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
Definition: qsvvpp.c:356
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:353
query_frame
static QSVFrame * query_frame(QSVVPPContext *s, AVFilterLink *outlink)
Definition: qsvvpp.c:427