FFmpeg
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
vf_deinterlace_vaapi.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include <string.h>

#include <va/va.h>
#include <va/va_vpp.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
36 
37 #define MAX_REFERENCES 8
38 
39 typedef struct DeintVAAPIContext {
40  const AVClass *class;
41 
44 
45  int mode;
48 
49  int valid_ids;
50  VAConfigID va_config;
51  VAContextID va_context;
52 
55 
60 
61  VAProcFilterCapDeinterlacing
62  deint_caps[VAProcDeinterlacingCount];
64  VAProcPipelineCaps pipeline_caps;
65 
70 
71  VABufferID filter_buffer;
73 
74 static const char *deint_vaapi_mode_name(int mode)
75 {
76  switch (mode) {
77 #define D(name) case VAProcDeinterlacing ## name: return #name
78  D(Bob);
79  D(Weave);
80  D(MotionAdaptive);
81  D(MotionCompensated);
82 #undef D
83  default:
84  return "Invalid";
85  }
86 }
87 
89 {
90  enum AVPixelFormat pix_fmts[] = {
92  };
93  int err;
94 
95  if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
96  &avctx->inputs[0]->out_formats)) < 0)
97  return err;
98  if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
99  &avctx->outputs[0]->in_formats)) < 0)
100  return err;
101 
102  return 0;
103 }
104 
106 {
107  DeintVAAPIContext *ctx = avctx->priv;
108  int i;
109 
110  for (i = 0; i < ctx->queue_count; i++)
111  av_frame_free(&ctx->frame_queue[i]);
112  ctx->queue_count = 0;
113 
114  if (ctx->filter_buffer != VA_INVALID_ID) {
115  vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffer);
116  ctx->filter_buffer = VA_INVALID_ID;
117  }
118 
119  if (ctx->va_context != VA_INVALID_ID) {
120  vaDestroyContext(ctx->hwctx->display, ctx->va_context);
121  ctx->va_context = VA_INVALID_ID;
122  }
123 
124  if (ctx->va_config != VA_INVALID_ID) {
125  vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
126  ctx->va_config = VA_INVALID_ID;
127  }
128 
130  ctx->hwctx = NULL;
131 
132  return 0;
133 }
134 
136 {
137  AVFilterContext *avctx = inlink->dst;
138  DeintVAAPIContext *ctx = avctx->priv;
139 
141 
142  if (!inlink->hw_frames_ctx) {
143  av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
144  "required to associate the processing device.\n");
145  return AVERROR(EINVAL);
146  }
147 
150 
151  return 0;
152 }
153 
155 {
156  DeintVAAPIContext *ctx = avctx->priv;
157  VAStatus vas;
158  VAProcFilterParameterBufferDeinterlacing params;
159  int i;
160 
161  ctx->nb_deint_caps = VAProcDeinterlacingCount;
162  vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display,
163  ctx->va_context,
164  VAProcFilterDeinterlacing,
165  &ctx->deint_caps,
166  &ctx->nb_deint_caps);
167  if (vas != VA_STATUS_SUCCESS) {
168  av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
169  "caps: %d (%s).\n", vas, vaErrorStr(vas));
170  return AVERROR(EIO);
171  }
172 
173  if (ctx->mode == VAProcDeinterlacingNone) {
174  for (i = 0; i < ctx->nb_deint_caps; i++) {
175  if (ctx->deint_caps[i].type > ctx->mode)
176  ctx->mode = ctx->deint_caps[i].type;
177  }
178  av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
179  "deinterlacing mode.\n", ctx->mode,
181  } else {
182  for (i = 0; i < ctx->nb_deint_caps; i++) {
183  if (ctx->deint_caps[i].type == ctx->mode)
184  break;
185  }
186  if (i >= ctx->nb_deint_caps) {
187  av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
188  "not supported.\n", ctx->mode,
190  }
191  }
192 
193  params.type = VAProcFilterDeinterlacing;
194  params.algorithm = ctx->mode;
195  params.flags = 0;
196 
197  av_assert0(ctx->filter_buffer == VA_INVALID_ID);
198  vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
199  VAProcFilterParameterBufferType,
200  sizeof(params), 1, &params,
201  &ctx->filter_buffer);
202  if (vas != VA_STATUS_SUCCESS) {
203  av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace "
204  "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
205  return AVERROR(EIO);
206  }
207 
208  vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display,
209  ctx->va_context,
210  &ctx->filter_buffer, 1,
211  &ctx->pipeline_caps);
212  if (vas != VA_STATUS_SUCCESS) {
213  av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
214  "caps: %d (%s).\n", vas, vaErrorStr(vas));
215  return AVERROR(EIO);
216  }
217 
218  ctx->extra_delay_for_timestamps = ctx->field_rate == 2 &&
219  ctx->pipeline_caps.num_backward_references == 0;
220 
221  ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
222  ctx->pipeline_caps.num_forward_references +
224  if (ctx->queue_depth > MAX_REFERENCES) {
225  av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
226  "references (%u forward, %u back).\n",
227  ctx->pipeline_caps.num_forward_references,
228  ctx->pipeline_caps.num_backward_references);
229  return AVERROR(ENOSYS);
230  }
231 
232  return 0;
233 }
234 
236 {
237  AVFilterContext *avctx = outlink->src;
238  AVFilterLink *inlink = avctx->inputs[0];
239  DeintVAAPIContext *ctx = avctx->priv;
240  AVVAAPIHWConfig *hwconfig = NULL;
241  AVHWFramesConstraints *constraints = NULL;
242  AVVAAPIFramesContext *va_frames;
243  VAStatus vas;
244  int err;
245 
247 
248  av_assert0(ctx->input_frames);
250  ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;
251 
252  ctx->output_width = ctx->input_frames->width;
253  ctx->output_height = ctx->input_frames->height;
254 
255  av_assert0(ctx->va_config == VA_INVALID_ID);
256  vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
257  VAEntrypointVideoProc, 0, 0, &ctx->va_config);
258  if (vas != VA_STATUS_SUCCESS) {
259  av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
260  "config: %d (%s).\n", vas, vaErrorStr(vas));
261  err = AVERROR(EIO);
262  goto fail;
263  }
264 
265  hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
266  if (!hwconfig) {
267  err = AVERROR(ENOMEM);
268  goto fail;
269  }
270  hwconfig->config_id = ctx->va_config;
271 
273  hwconfig);
274  if (!constraints) {
275  err = AVERROR(ENOMEM);
276  goto fail;
277  }
278 
279  if (ctx->output_width < constraints->min_width ||
280  ctx->output_height < constraints->min_height ||
281  ctx->output_width > constraints->max_width ||
282  ctx->output_height > constraints->max_height) {
283  av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
284  "deinterlacing to size %dx%d "
285  "(constraints: width %d-%d height %d-%d).\n",
286  ctx->output_width, ctx->output_height,
287  constraints->min_width, constraints->max_width,
288  constraints->min_height, constraints->max_height);
289  err = AVERROR(EINVAL);
290  goto fail;
291  }
292 
294  if (!ctx->output_frames_ref) {
295  av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
296  "for output.\n");
297  err = AVERROR(ENOMEM);
298  goto fail;
299  }
300 
302 
305  ctx->output_frames->width = ctx->output_width;
306  ctx->output_frames->height = ctx->output_height;
307 
308  // The number of output frames we need is determined by what follows
309  // the filter. If it's an encoder with complex frame reference
310  // structures then this could be very high.
311  ctx->output_frames->initial_pool_size = 10;
312 
314  if (err < 0) {
315  av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
316  "context for output: %d\n", err);
317  goto fail;
318  }
319 
320  va_frames = ctx->output_frames->hwctx;
321 
322  av_assert0(ctx->va_context == VA_INVALID_ID);
323  vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
324  ctx->output_width, ctx->output_height, 0,
325  va_frames->surface_ids, va_frames->nb_surfaces,
326  &ctx->va_context);
327  if (vas != VA_STATUS_SUCCESS) {
328  av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
329  "context: %d (%s).\n", vas, vaErrorStr(vas));
330  err = AVERROR(EIO);
331  goto fail;
332  }
333 
334  err = deint_vaapi_build_filter_params(avctx);
335  if (err < 0)
336  goto fail;
337 
338  outlink->w = inlink->w;
339  outlink->h = inlink->h;
340 
341  outlink->time_base = av_mul_q(inlink->time_base,
342  (AVRational) { 1, ctx->field_rate });
343  outlink->frame_rate = av_mul_q(inlink->frame_rate,
344  (AVRational) { ctx->field_rate, 1 });
345 
346  outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
347  if (!outlink->hw_frames_ctx) {
348  err = AVERROR(ENOMEM);
349  goto fail;
350  }
351 
352  av_freep(&hwconfig);
353  av_hwframe_constraints_free(&constraints);
354  return 0;
355 
356 fail:
357  av_buffer_unref(&ctx->output_frames_ref);
358  av_freep(&hwconfig);
359  av_hwframe_constraints_free(&constraints);
360  return err;
361 }
362 
364 {
365  switch(av_cs) {
366 #define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
367  CS(BT709, BT709);
368  CS(BT470BG, BT470BG);
369  CS(SMPTE170M, SMPTE170M);
370  CS(SMPTE240M, SMPTE240M);
371 #undef CS
372  default:
373  return VAProcColorStandardNone;
374  }
375 }
376 
377 static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
378 {
379  AVFilterContext *avctx = inlink->dst;
380  AVFilterLink *outlink = avctx->outputs[0];
381  DeintVAAPIContext *ctx = avctx->priv;
383  VASurfaceID input_surface, output_surface;
384  VASurfaceID backward_references[MAX_REFERENCES];
385  VASurfaceID forward_references[MAX_REFERENCES];
386  VAProcPipelineParameterBuffer params;
387  VAProcFilterParameterBufferDeinterlacing *filter_params;
388  VARectangle input_region;
389  VABufferID params_id;
390  VAStatus vas;
391  void *filter_params_addr = NULL;
392  int err, i, field, current_frame_index;
393 
394  av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
395  av_get_pix_fmt_name(input_frame->format),
396  input_frame->width, input_frame->height, input_frame->pts);
397 
398  if (ctx->queue_count < ctx->queue_depth) {
399  ctx->frame_queue[ctx->queue_count++] = input_frame;
400  if (ctx->queue_count < ctx->queue_depth) {
401  // Need more reference surfaces before we can continue.
402  return 0;
403  }
404  } else {
405  av_frame_free(&ctx->frame_queue[0]);
406  for (i = 0; i + 1 < ctx->queue_count; i++)
407  ctx->frame_queue[i] = ctx->frame_queue[i + 1];
408  ctx->frame_queue[i] = input_frame;
409  }
410 
411  current_frame_index = ctx->pipeline_caps.num_forward_references;
412 
413  input_frame = ctx->frame_queue[current_frame_index];
414  input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
415  for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
416  forward_references[i] = (VASurfaceID)(uintptr_t)
417  ctx->frame_queue[current_frame_index - i - 1]->data[3];
418  for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
419  backward_references[i] = (VASurfaceID)(uintptr_t)
420  ctx->frame_queue[current_frame_index + i + 1]->data[3];
421 
422  av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
423  "deinterlace input.\n", input_surface);
424  av_log(avctx, AV_LOG_DEBUG, "Backward references:");
425  for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
426  av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
427  av_log(avctx, AV_LOG_DEBUG, "\n");
428  av_log(avctx, AV_LOG_DEBUG, "Forward references:");
429  for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
430  av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
431  av_log(avctx, AV_LOG_DEBUG, "\n");
432 
433  for (field = 0; field < ctx->field_rate; field++) {
434  output_frame = ff_get_video_buffer(outlink, ctx->output_width,
435  ctx->output_height);
436  if (!output_frame) {
437  err = AVERROR(ENOMEM);
438  goto fail;
439  }
440 
441  output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
442  av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
443  "deinterlace output.\n", output_surface);
444 
445  memset(&params, 0, sizeof(params));
446 
447  input_region = (VARectangle) {
448  .x = 0,
449  .y = 0,
450  .width = input_frame->width,
451  .height = input_frame->height,
452  };
453 
454  params.surface = input_surface;
455  params.surface_region = &input_region;
456  params.surface_color_standard =
458 
459  params.output_region = NULL;
460  params.output_background_color = 0xff000000;
461  params.output_color_standard = params.surface_color_standard;
462 
463  params.pipeline_flags = 0;
464  params.filter_flags = VA_FRAME_PICTURE;
465 
466  if (!ctx->auto_enable || input_frame->interlaced_frame) {
467  vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
468  &filter_params_addr);
469  if (vas != VA_STATUS_SUCCESS) {
470  av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
471  "buffer: %d (%s).\n", vas, vaErrorStr(vas));
472  err = AVERROR(EIO);
473  goto fail;
474  }
475  filter_params = filter_params_addr;
476  filter_params->flags = 0;
477  if (input_frame->top_field_first) {
478  filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
479  } else {
480  filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
481  filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
482  }
483  filter_params_addr = NULL;
484  vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
485  if (vas != VA_STATUS_SUCCESS)
486  av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
487  "buffer: %d (%s).\n", vas, vaErrorStr(vas));
488 
489  params.filters = &ctx->filter_buffer;
490  params.num_filters = 1;
491 
492  params.forward_references = forward_references;
493  params.num_forward_references =
494  ctx->pipeline_caps.num_forward_references;
495  params.backward_references = backward_references;
496  params.num_backward_references =
497  ctx->pipeline_caps.num_backward_references;
498 
499  } else {
500  params.filters = NULL;
501  params.num_filters = 0;
502  }
503 
504  vas = vaBeginPicture(ctx->hwctx->display,
505  ctx->va_context, output_surface);
506  if (vas != VA_STATUS_SUCCESS) {
507  av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
508  "%d (%s).\n", vas, vaErrorStr(vas));
509  err = AVERROR(EIO);
510  goto fail;
511  }
512 
513  vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
514  VAProcPipelineParameterBufferType,
515  sizeof(params), 1, &params, &params_id);
516  if (vas != VA_STATUS_SUCCESS) {
517  av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
518  "%d (%s).\n", vas, vaErrorStr(vas));
519  err = AVERROR(EIO);
520  goto fail_after_begin;
521  }
522  av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
523  params_id);
524 
525  vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
526  &params_id, 1);
527  if (vas != VA_STATUS_SUCCESS) {
528  av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
529  "%d (%s).\n", vas, vaErrorStr(vas));
530  err = AVERROR(EIO);
531  goto fail_after_begin;
532  }
533 
534  vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
535  if (vas != VA_STATUS_SUCCESS) {
536  av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
537  "%d (%s).\n", vas, vaErrorStr(vas));
538  err = AVERROR(EIO);
539  goto fail_after_render;
540  }
541 
542  if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
544  vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
545  if (vas != VA_STATUS_SUCCESS) {
546  av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
547  "%d (%s).\n", vas, vaErrorStr(vas));
548  // And ignore.
549  }
550  }
551 
552  err = av_frame_copy_props(output_frame, input_frame);
553  if (err < 0)
554  goto fail;
555 
556  if (ctx->field_rate == 2) {
557  if (field == 0)
558  output_frame->pts = 2 * input_frame->pts;
559  else
560  output_frame->pts = input_frame->pts +
561  ctx->frame_queue[current_frame_index + 1]->pts;
562  }
563  output_frame->interlaced_frame = 0;
564 
565  av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
566  av_get_pix_fmt_name(output_frame->format),
567  output_frame->width, output_frame->height, output_frame->pts);
568 
569  err = ff_filter_frame(outlink, output_frame);
570  if (err < 0)
571  break;
572  }
573 
574  return err;
575 
576 fail_after_begin:
577  vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
578 fail_after_render:
579  vaEndPicture(ctx->hwctx->display, ctx->va_context);
580 fail:
581  if (filter_params_addr)
582  vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
583  av_frame_free(&output_frame);
584  return err;
585 }
586 
588 {
589  DeintVAAPIContext *ctx = avctx->priv;
590 
591  ctx->va_config = VA_INVALID_ID;
592  ctx->va_context = VA_INVALID_ID;
593  ctx->filter_buffer = VA_INVALID_ID;
594  ctx->valid_ids = 1;
595 
596  return 0;
597 }
598 
600 {
601  DeintVAAPIContext *ctx = avctx->priv;
602 
603  if (ctx->valid_ids)
605 
609 }
610 
611 #define OFFSET(x) offsetof(DeintVAAPIContext, x)
612 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
613 static const AVOption deint_vaapi_options[] = {
614  { "mode", "Deinterlacing mode",
615  OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
616  VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
617  { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
618  0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, .unit = "mode" },
619  { "bob", "Use the bob deinterlacing algorithm",
620  0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, .unit = "mode" },
621  { "weave", "Use the weave deinterlacing algorithm",
622  0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, .unit = "mode" },
623  { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
624  0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, .unit = "mode" },
625  { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
626  0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, .unit = "mode" },
627 
628  { "rate", "Generate output at frame rate or field rate",
629  OFFSET(field_rate), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 2, FLAGS, "rate" },
630  { "frame", "Output at frame rate (one frame of output for each field-pair)",
631  0, AV_OPT_TYPE_CONST, { .i64 = 1 }, .unit = "rate" },
632  { "field", "Output at field rate (one frame of output for each field)",
633  0, AV_OPT_TYPE_CONST, { .i64 = 2 }, .unit = "rate" },
634 
635  { "auto", "Only deinterlace fields, passing frames through unchanged",
636  OFFSET(auto_enable), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
637 
638  { NULL },
639 };
640 
641 static const AVClass deint_vaapi_class = {
642  .class_name = "deinterlace_vaapi",
643  .item_name = av_default_item_name,
644  .option = deint_vaapi_options,
645  .version = LIBAVUTIL_VERSION_INT,
646 };
647 
648 static const AVFilterPad deint_vaapi_inputs[] = {
649  {
650  .name = "default",
651  .type = AVMEDIA_TYPE_VIDEO,
652  .filter_frame = &deint_vaapi_filter_frame,
653  .config_props = &deint_vaapi_config_input,
654  },
655  { NULL }
656 };
657 
659  {
660  .name = "default",
661  .type = AVMEDIA_TYPE_VIDEO,
662  .config_props = &deint_vaapi_config_output,
663  },
664  { NULL }
665 };
666 
668  .name = "deinterlace_vaapi",
669  .description = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
670  .priv_size = sizeof(DeintVAAPIContext),
674  .inputs = deint_vaapi_inputs,
675  .outputs = deint_vaapi_outputs,
676  .priv_class = &deint_vaapi_class,
677  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
678 };
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:58
#define NULL
Definition: coverity.c:32
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: internal.h:385
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it...
Definition: buffer.c:125
VAAPI-specific data associated with a frame pool.
This structure describes decoded (raw) audio or video data.
Definition: frame.h:201
The driver does not destroy parameter buffers when they are used by vaRenderPicture().
AVOption.
Definition: opt.h:246
#define LIBAVUTIL_VERSION_INT
Definition: version.h:86
Main libavfilter public API header.
Memory handling functions.
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:35
VAProcFilterCapDeinterlacing deint_caps[VAProcDeinterlacingCount]
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:226
void * av_hwdevice_hwconfig_alloc(AVBufferRef *ref)
Allocate a HW-specific configuration structure for a given HW device.
Definition: hwcontext.c:518
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:206
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:92
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
int max_width
The maximum size of frames in this hw_frames_ctx.
Definition: hwcontext.h:455
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
API-specific header for AV_HWDEVICE_TYPE_VAAPI.
void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
Free an AVHWFrameConstraints structure.
Definition: hwcontext.c:554
const char * name
Pad name.
Definition: internal.h:60
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:72
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:346
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1151
AVVAAPIDeviceContext * hwctx
#define av_cold
Definition: attributes.h:82
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:279
AVOptions.
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:484
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:294
#define D(name)
AVFilter ff_vf_deinterlace_vaapi
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:192
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:348
static int deint_vaapi_query_formats(AVFilterContext *avctx)
#define av_log(a,...)
A filter pad used for either input or output.
Definition: internal.h:54
AVHWFramesContext * input_frames
int width
Definition: frame.h:259
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
VAAPI hardware pipeline configuration details.
av_default_item_name
#define AVERROR(e)
Definition: error.h:43
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:163
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:179
void * priv
private data for use by the filter
Definition: avfilter.h:353
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:197
GLenum GLint * params
Definition: opengl_enc.c:114
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:457
simple assert() macros that are a bit more flexible than ISO C assert().
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:323
#define fail()
Definition: checkasm.h:109
static int deint_vaapi_config_output(AVFilterLink *outlink)
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:196
AVFrame * frame_queue[MAX_REFERENCES]
static const char * deint_vaapi_mode_name(int mode)
#define OFFSET(x)
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:440
AVFormatContext * ctx
Definition: movenc.c:48
static int deint_vaapi_config_input(AVFilterLink *inlink)
static const AVFilterPad outputs[]
Definition: af_afftfilt.c:389
AVBufferRef * input_frames_ref
AVBufferRef * device_ref
VAProcPipelineCaps pipeline_caps
VADisplay display
The VADisplay handle, to be filled by the user.
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:274
int min_width
The minimum size of frames in this hw_frames_ctx.
Definition: hwcontext.h:448
static const AVFilterPad inputs[]
Definition: af_afftfilt.c:379
#define FLAGS
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:430
AVBufferRef * output_frames_ref
AVHWFramesConstraints * av_hwdevice_get_hwframe_constraints(AVBufferRef *ref, const void *hwconfig)
Get the constraints on HW frames given a device and the HW-specific configuration to be used with tha...
Definition: hwcontext.c:529
uint8_t * data
The data buffer.
Definition: buffer.h:89
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:159
unsigned int driver_quirks
Driver quirks to apply - this is filled by av_hwdevice_ctx_init(), with reference to a table of known...
static int output_frame(H264Context *h, AVFrame *dst, H264Picture *srcp)
Definition: h264dec.c:846
AVHWFramesContext * output_frames
Describe the class of an AVClass context structure.
Definition: log.h:67
Filter definition.
Definition: avfilter.h:144
Rational number (pair of numerator and denominator).
Definition: rational.h:58
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:121
const char * name
Filter name.
Definition: avfilter.h:148
static const AVClass deint_vaapi_class
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:350
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:266
static int deint_vaapi_pipeline_uninit(AVFilterContext *avctx)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:215
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:138
#define MAX_REFERENCES
A reference to a data buffer.
Definition: buffer.h:81
static av_cold int deint_vaapi_init(AVFilterContext *avctx)
static const AVFilterPad deint_vaapi_inputs[]
static int query_formats(AVFilterContext *ctx)
Definition: aeval.c:244
common internal and external API header
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:237
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:93
static const AVFilterPad deint_vaapi_outputs[]
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:353
VAAPI connection details.
VAConfigID config_id
ID of a VAAPI pipeline configuration.
An instance of a filter.
Definition: avfilter.h:338
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
int height
Definition: frame.h:259
#define av_freep(p)
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2335
internal API functions
#define CS(av, va)
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:219
AVPixelFormat
Pixel format.
Definition: pixfmt.h:60
mode
Use these values in ebur128_init (or'ed).
Definition: ebur128.h:83
static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
for(j=16;j >0;--j)
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:603
static av_cold void deint_vaapi_uninit(AVFilterContext *avctx)
static const AVOption deint_vaapi_options[]