FFmpeg
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
vf_deinterlace_vaapi.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include <string.h>

#include <va/va.h>
#include <va/va_vpp.h>

#include "libavutil/avassert.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
35 
36 #define MAX_REFERENCES 8
37 
38 typedef struct DeintVAAPIContext {
39  const AVClass *class;
40 
43 
44  int mode;
45 
46  int valid_ids;
47  VAConfigID va_config;
48  VAContextID va_context;
49 
52 
57 
58  VAProcFilterCapDeinterlacing
59  deint_caps[VAProcDeinterlacingCount];
61  VAProcPipelineCaps pipeline_caps;
62 
66 
67  VABufferID filter_buffer;
69 
70 static const char *deint_vaapi_mode_name(int mode)
71 {
72  switch (mode) {
73 #define D(name) case VAProcDeinterlacing ## name: return #name
74  D(Bob);
75  D(Weave);
76  D(MotionAdaptive);
77  D(MotionCompensated);
78 #undef D
79  default:
80  return "Invalid";
81  }
82 }
83 
85 {
86  enum AVPixelFormat pix_fmts[] = {
88  };
89  int err;
90 
91  if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
92  &avctx->inputs[0]->out_formats)) < 0)
93  return err;
94  if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
95  &avctx->outputs[0]->in_formats)) < 0)
96  return err;
97 
98  return 0;
99 }
100 
102 {
103  DeintVAAPIContext *ctx = avctx->priv;
104  int i;
105 
106  for (i = 0; i < ctx->queue_count; i++)
107  av_frame_free(&ctx->frame_queue[i]);
108  ctx->queue_count = 0;
109 
110  if (ctx->filter_buffer != VA_INVALID_ID) {
111  vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffer);
112  ctx->filter_buffer = VA_INVALID_ID;
113  }
114 
115  if (ctx->va_context != VA_INVALID_ID) {
116  vaDestroyContext(ctx->hwctx->display, ctx->va_context);
117  ctx->va_context = VA_INVALID_ID;
118  }
119 
120  if (ctx->va_config != VA_INVALID_ID) {
121  vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
122  ctx->va_config = VA_INVALID_ID;
123  }
124 
126  ctx->hwctx = NULL;
127 
128  return 0;
129 }
130 
132 {
133  AVFilterContext *avctx = inlink->dst;
134  DeintVAAPIContext *ctx = avctx->priv;
135 
137 
138  if (!inlink->hw_frames_ctx) {
139  av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
140  "required to associate the processing device.\n");
141  return AVERROR(EINVAL);
142  }
143 
146 
147  return 0;
148 }
149 
151 {
152  DeintVAAPIContext *ctx = avctx->priv;
153  VAStatus vas;
154  VAProcFilterParameterBufferDeinterlacing params;
155  int i;
156 
157  ctx->nb_deint_caps = VAProcDeinterlacingCount;
158  vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display,
159  ctx->va_context,
160  VAProcFilterDeinterlacing,
161  &ctx->deint_caps,
162  &ctx->nb_deint_caps);
163  if (vas != VA_STATUS_SUCCESS) {
164  av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
165  "caps: %d (%s).\n", vas, vaErrorStr(vas));
166  return AVERROR(EIO);
167  }
168 
169  if (ctx->mode == VAProcDeinterlacingNone) {
170  for (i = 0; i < ctx->nb_deint_caps; i++) {
171  if (ctx->deint_caps[i].type > ctx->mode)
172  ctx->mode = ctx->deint_caps[i].type;
173  }
174  av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
175  "deinterlacing mode.\n", ctx->mode,
177  } else {
178  for (i = 0; i < ctx->nb_deint_caps; i++) {
179  if (ctx->deint_caps[i].type == ctx->mode)
180  break;
181  }
182  if (i >= ctx->nb_deint_caps) {
183  av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
184  "not supported.\n", ctx->mode,
186  }
187  }
188 
189  params.type = VAProcFilterDeinterlacing;
190  params.algorithm = ctx->mode;
191  params.flags = 0;
192 
193  av_assert0(ctx->filter_buffer == VA_INVALID_ID);
194  vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
195  VAProcFilterParameterBufferType,
196  sizeof(params), 1, &params,
197  &ctx->filter_buffer);
198  if (vas != VA_STATUS_SUCCESS) {
199  av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace "
200  "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
201  return AVERROR(EIO);
202  }
203 
204  vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display,
205  ctx->va_context,
206  &ctx->filter_buffer, 1,
207  &ctx->pipeline_caps);
208  if (vas != VA_STATUS_SUCCESS) {
209  av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
210  "caps: %d (%s).\n", vas, vaErrorStr(vas));
211  return AVERROR(EIO);
212  }
213 
214  ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
215  ctx->pipeline_caps.num_forward_references + 1;
216  if (ctx->queue_depth > MAX_REFERENCES) {
217  av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
218  "references (%u forward, %u back).\n",
219  ctx->pipeline_caps.num_forward_references,
220  ctx->pipeline_caps.num_backward_references);
221  return AVERROR(ENOSYS);
222  }
223 
224  return 0;
225 }
226 
228 {
229  AVFilterContext *avctx = outlink->src;
230  DeintVAAPIContext *ctx = avctx->priv;
231  AVVAAPIHWConfig *hwconfig = NULL;
232  AVHWFramesConstraints *constraints = NULL;
233  AVVAAPIFramesContext *va_frames;
234  VAStatus vas;
235  int err;
236 
238 
239  av_assert0(ctx->input_frames);
241  ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;
242 
243  ctx->output_width = ctx->input_frames->width;
244  ctx->output_height = ctx->input_frames->height;
245 
246  av_assert0(ctx->va_config == VA_INVALID_ID);
247  vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
248  VAEntrypointVideoProc, 0, 0, &ctx->va_config);
249  if (vas != VA_STATUS_SUCCESS) {
250  av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
251  "config: %d (%s).\n", vas, vaErrorStr(vas));
252  err = AVERROR(EIO);
253  goto fail;
254  }
255 
256  hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
257  if (!hwconfig) {
258  err = AVERROR(ENOMEM);
259  goto fail;
260  }
261  hwconfig->config_id = ctx->va_config;
262 
264  hwconfig);
265  if (!constraints) {
266  err = AVERROR(ENOMEM);
267  goto fail;
268  }
269 
270  if (ctx->output_width < constraints->min_width ||
271  ctx->output_height < constraints->min_height ||
272  ctx->output_width > constraints->max_width ||
273  ctx->output_height > constraints->max_height) {
274  av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
275  "deinterlacing to size %dx%d "
276  "(constraints: width %d-%d height %d-%d).\n",
277  ctx->output_width, ctx->output_height,
278  constraints->min_width, constraints->max_width,
279  constraints->min_height, constraints->max_height);
280  err = AVERROR(EINVAL);
281  goto fail;
282  }
283 
285  if (!ctx->output_frames_ref) {
286  av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
287  "for output.\n");
288  err = AVERROR(ENOMEM);
289  goto fail;
290  }
291 
293 
296  ctx->output_frames->width = ctx->output_width;
297  ctx->output_frames->height = ctx->output_height;
298 
299  // The number of output frames we need is determined by what follows
300  // the filter. If it's an encoder with complex frame reference
301  // structures then this could be very high.
302  ctx->output_frames->initial_pool_size = 10;
303 
305  if (err < 0) {
306  av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
307  "context for output: %d\n", err);
308  goto fail;
309  }
310 
311  va_frames = ctx->output_frames->hwctx;
312 
313  av_assert0(ctx->va_context == VA_INVALID_ID);
314  vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
315  ctx->output_width, ctx->output_height, 0,
316  va_frames->surface_ids, va_frames->nb_surfaces,
317  &ctx->va_context);
318  if (vas != VA_STATUS_SUCCESS) {
319  av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
320  "context: %d (%s).\n", vas, vaErrorStr(vas));
321  err = AVERROR(EIO);
322  goto fail;
323  }
324 
325  err = deint_vaapi_build_filter_params(avctx);
326  if (err < 0)
327  goto fail;
328 
329  outlink->w = ctx->output_width;
330  outlink->h = ctx->output_height;
331 
333  if (!outlink->hw_frames_ctx) {
334  err = AVERROR(ENOMEM);
335  goto fail;
336  }
337 
338  av_freep(&hwconfig);
339  av_hwframe_constraints_free(&constraints);
340  return 0;
341 
342 fail:
344  av_freep(&hwconfig);
345  av_hwframe_constraints_free(&constraints);
346  return err;
347 }
348 
350 {
351  switch(av_cs) {
352 #define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
353  CS(BT709, BT709);
354  CS(BT470BG, BT470BG);
355  CS(SMPTE170M, SMPTE170M);
356  CS(SMPTE240M, SMPTE240M);
357 #undef CS
358  default:
359  return VAProcColorStandardNone;
360  }
361 }
362 
363 static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
364 {
365  AVFilterContext *avctx = inlink->dst;
366  AVFilterLink *outlink = avctx->outputs[0];
367  DeintVAAPIContext *ctx = avctx->priv;
369  VASurfaceID input_surface, output_surface;
370  VASurfaceID backward_references[MAX_REFERENCES];
371  VASurfaceID forward_references[MAX_REFERENCES];
372  VAProcPipelineParameterBuffer params;
373  VAProcFilterParameterBufferDeinterlacing *filter_params;
374  VARectangle input_region;
375  VABufferID params_id;
376  VAStatus vas;
377  void *filter_params_addr = NULL;
378  int err, i;
379 
380  av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
381  av_get_pix_fmt_name(input_frame->format),
382  input_frame->width, input_frame->height, input_frame->pts);
383 
384  if (ctx->queue_count < ctx->queue_depth) {
385  ctx->frame_queue[ctx->queue_count++] = input_frame;
386  if (ctx->queue_count < ctx->queue_depth) {
387  // Need more reference surfaces before we can continue.
388  return 0;
389  }
390  } else {
391  av_frame_free(&ctx->frame_queue[0]);
392  for (i = 0; i + 1 < ctx->queue_count; i++)
393  ctx->frame_queue[i] = ctx->frame_queue[i + 1];
394  ctx->frame_queue[i] = input_frame;
395  }
396 
397  input_frame =
398  ctx->frame_queue[ctx->pipeline_caps.num_backward_references];
399  input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
400  for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
401  backward_references[i] = (VASurfaceID)(uintptr_t)
402  ctx->frame_queue[ctx->pipeline_caps.num_backward_references -
403  i - 1]->data[3];
404  for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
405  forward_references[i] = (VASurfaceID)(uintptr_t)
406  ctx->frame_queue[ctx->pipeline_caps.num_backward_references +
407  i + 1]->data[3];
408 
409  av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
410  "deinterlace input.\n", input_surface);
411  av_log(avctx, AV_LOG_DEBUG, "Backward references:");
412  for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
413  av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
414  av_log(avctx, AV_LOG_DEBUG, "\n");
415  av_log(avctx, AV_LOG_DEBUG, "Forward references:");
416  for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
417  av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
418  av_log(avctx, AV_LOG_DEBUG, "\n");
419 
420  output_frame = av_frame_alloc();
421  if (!output_frame) {
422  err = AVERROR(ENOMEM);
423  goto fail;
424  }
425 
427  output_frame, 0);
428  if (err < 0) {
429  err = AVERROR(ENOMEM);
430  goto fail;
431  }
432 
433  output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
434  av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
435  "deinterlace output.\n", output_surface);
436 
437  memset(&params, 0, sizeof(params));
438 
439  input_region = (VARectangle) {
440  .x = 0,
441  .y = 0,
442  .width = input_frame->width,
443  .height = input_frame->height,
444  };
445 
446  params.surface = input_surface;
447  params.surface_region = &input_region;
448  params.surface_color_standard = vaapi_proc_colour_standard(
449  av_frame_get_colorspace(input_frame));
450 
451  params.output_region = NULL;
452  params.output_background_color = 0xff000000;
453  params.output_color_standard = params.surface_color_standard;
454 
455  params.pipeline_flags = 0;
456  params.filter_flags = VA_FRAME_PICTURE;
457 
458  vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
459  &filter_params_addr);
460  if (vas != VA_STATUS_SUCCESS) {
461  av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
462  "buffer: %d (%s).\n", vas, vaErrorStr(vas));
463  err = AVERROR(EIO);
464  goto fail;
465  }
466  filter_params = filter_params_addr;
467  filter_params->flags = 0;
468  if (input_frame->interlaced_frame && !input_frame->top_field_first)
469  filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
470  filter_params_addr = NULL;
471  vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
472  if (vas != VA_STATUS_SUCCESS)
473  av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
474  "buffer: %d (%s).\n", vas, vaErrorStr(vas));
475 
476  params.filters = &ctx->filter_buffer;
477  params.num_filters = 1;
478 
479  params.forward_references = forward_references;
480  params.num_forward_references =
481  ctx->pipeline_caps.num_forward_references;
482  params.backward_references = backward_references;
483  params.num_backward_references =
484  ctx->pipeline_caps.num_backward_references;
485 
486  vas = vaBeginPicture(ctx->hwctx->display,
487  ctx->va_context, output_surface);
488  if (vas != VA_STATUS_SUCCESS) {
489  av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
490  "%d (%s).\n", vas, vaErrorStr(vas));
491  err = AVERROR(EIO);
492  goto fail;
493  }
494 
495  vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
496  VAProcPipelineParameterBufferType,
497  sizeof(params), 1, &params, &params_id);
498  if (vas != VA_STATUS_SUCCESS) {
499  av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
500  "%d (%s).\n", vas, vaErrorStr(vas));
501  err = AVERROR(EIO);
502  goto fail_after_begin;
503  }
504  av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
505  params_id);
506 
507  vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
508  &params_id, 1);
509  if (vas != VA_STATUS_SUCCESS) {
510  av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
511  "%d (%s).\n", vas, vaErrorStr(vas));
512  err = AVERROR(EIO);
513  goto fail_after_begin;
514  }
515 
516  vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
517  if (vas != VA_STATUS_SUCCESS) {
518  av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
519  "%d (%s).\n", vas, vaErrorStr(vas));
520  err = AVERROR(EIO);
521  goto fail_after_render;
522  }
523 
524  if (ctx->hwctx->driver_quirks &
526  vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
527  if (vas != VA_STATUS_SUCCESS) {
528  av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
529  "%d (%s).\n", vas, vaErrorStr(vas));
530  // And ignore.
531  }
532  }
533 
534  err = av_frame_copy_props(output_frame, input_frame);
535  if (err < 0)
536  goto fail;
537 
538  av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
539  av_get_pix_fmt_name(output_frame->format),
540  output_frame->width, output_frame->height, output_frame->pts);
541 
542  return ff_filter_frame(outlink, output_frame);
543 
544 fail_after_begin:
545  vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
546 fail_after_render:
547  vaEndPicture(ctx->hwctx->display, ctx->va_context);
548 fail:
549  if (filter_params_addr)
550  vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
551  av_frame_free(&output_frame);
552  return err;
553 }
554 
556 {
557  DeintVAAPIContext *ctx = avctx->priv;
558 
559  ctx->va_config = VA_INVALID_ID;
560  ctx->va_context = VA_INVALID_ID;
561  ctx->filter_buffer = VA_INVALID_ID;
562  ctx->valid_ids = 1;
563 
564  return 0;
565 }
566 
568 {
569  DeintVAAPIContext *ctx = avctx->priv;
570 
571  if (ctx->valid_ids)
573 
577 }
578 
#define OFFSET(x) offsetof(DeintVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
// Filter options: a single "mode" option selecting the VAProcDeinterlacing*
// algorithm; "default" (None) lets build_filter_params pick the
// highest-numbered algorithm the driver supports.
static const AVOption deint_vaapi_options[] = {
    { "mode", "Deinterlacing mode",
      OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
      VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
    { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, .unit = "mode" },
    { "bob", "Use the bob deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, .unit = "mode" },
    { "weave", "Use the weave deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, .unit = "mode" },
    { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, .unit = "mode" },
    { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, .unit = "mode" },
    { NULL },
};
597 
// AVClass exposing deint_vaapi_options through the AVOption API.
static const AVClass deint_vaapi_class = {
    .class_name = "deinterlace_vaapi",
    .item_name  = av_default_item_name,
    .option     = deint_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};
604 
// Single video input pad: frames are processed by deint_vaapi_filter_frame;
// the link is validated (hw_frames_ctx required) in deint_vaapi_config_input.
static const AVFilterPad deint_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &deint_vaapi_filter_frame,
        .config_props = &deint_vaapi_config_input,
    },
    { NULL }
};
614 
616  {
617  .name = "default",
618  .type = AVMEDIA_TYPE_VIDEO,
619  .config_props = &deint_vaapi_config_output,
620  },
621  { NULL }
622 };
623 
625  .name = "deinterlace_vaapi",
626  .description = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
627  .priv_size = sizeof(DeintVAAPIContext),
631  .inputs = deint_vaapi_inputs,
632  .outputs = deint_vaapi_outputs,
633  .priv_class = &deint_vaapi_class,
634  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
635 };
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:54
#define NULL
Definition: coverity.c:32
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: internal.h:393
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it...
Definition: buffer.c:125
VAAPI-specific data associated with a frame pool.
This structure describes decoded (raw) audio or video data.
Definition: frame.h:187
AVOption.
Definition: opt.h:246
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
Main libavfilter public API header.
Memory handling functions.
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:35
VAProcFilterCapDeinterlacing deint_caps[VAProcDeinterlacingCount]
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:222
void * av_hwdevice_hwconfig_alloc(AVBufferRef *ref)
Allocate a HW-specific configuration structure for a given HW device.
Definition: hwcontext.c:457
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:202
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
int max_width
The maximum size of frames in this hw_frames_ctx.
Definition: hwcontext.h:398
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
API-specific header for AV_HWDEVICE_TYPE_VAAPI.
void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
Free an AVHWFrameConstraints structure.
Definition: hwcontext.c:493
const char * name
Pad name.
Definition: internal.h:60
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:72
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:331
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1125
AVVAAPIDeviceContext * hwctx
#define av_cold
Definition: attributes.h:82
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:150
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:287
AVOptions.
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:451
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:271
#define D(name)
AVFilter ff_vf_deinterlace_vaapi
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:192
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:325
static int deint_vaapi_query_formats(AVFilterContext *avctx)
#define av_log(a,...)
A filter pad used for either input or output.
Definition: internal.h:54
AVHWFramesContext * input_frames
int width
width and height of the video frame
Definition: frame.h:239
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
VAAPI hardware pipeline configuration details.
av_default_item_name
#define AVERROR(e)
Definition: error.h:43
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:163
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:179
void * priv
private data for use by the filter
Definition: avfilter.h:338
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:197
GLenum GLint * params
Definition: opengl_enc.c:114
simple assert() macros that are a bit more flexible than ISO C assert().
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:268
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:404
#define fail()
Definition: checkasm.h:89
static int deint_vaapi_config_output(AVFilterLink *outlink)
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:192
AVFrame * frame_queue[MAX_REFERENCES]
static const char * deint_vaapi_mode_name(int mode)
#define OFFSET(x)
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:440
AVFormatContext * ctx
Definition: movenc.c:48
static int deint_vaapi_config_input(AVFilterLink *inlink)
static const AVFilterPad outputs[]
Definition: af_afftfilt.c:386
AVBufferRef * input_frames_ref
AVBufferRef * device_ref
VAProcPipelineCaps pipeline_caps
VADisplay display
The VADisplay handle, to be filled by the user.
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:251
int min_width
The minimum size of frames in this hw_frames_ctx.
Definition: hwcontext.h:391
static const AVFilterPad inputs[]
Definition: af_afftfilt.c:376
#define FLAGS
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:373
AVBufferRef * output_frames_ref
AVHWFramesConstraints * av_hwdevice_get_hwframe_constraints(AVBufferRef *ref, const void *hwconfig)
Get the constraints on HW frames given a device and the HW-specific configuration to be used with tha...
Definition: hwcontext.c:468
uint8_t * data
The data buffer.
Definition: buffer.h:89
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:155
unsigned int driver_quirks
Driver quirks to apply - this is filled by av_hwdevice_ctx_init(), with reference to a table of known...
static int output_frame(H264Context *h, AVFrame *dst, H264Picture *srcp)
Definition: h264dec.c:845
enum AVColorSpace av_frame_get_colorspace(const AVFrame *frame)
AVHWFramesContext * output_frames
Describe the class of an AVClass context structure.
Definition: log.h:67
Filter definition.
Definition: avfilter.h:144
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:117
const char * name
Filter name.
Definition: avfilter.h:148
static const AVClass deint_vaapi_class
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:335
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:262
static int deint_vaapi_pipeline_uninit(AVFilterContext *avctx)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:201
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:134
#define MAX_REFERENCES
A reference to a data buffer.
Definition: buffer.h:81
static av_cold int deint_vaapi_init(AVFilterContext *avctx)
static const AVFilterPad deint_vaapi_inputs[]
static int query_formats(AVFilterContext *ctx)
Definition: aeval.c:244
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:182
AVBufferRef * av_buffer_ref(AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:93
static const AVFilterPad deint_vaapi_outputs[]
The driver does not destroy parameter buffers when they are used by vaRenderPicture().
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:330
VAAPI connection details.
VAConfigID config_id
ID of a VAAPI pipeline configuration.
An instance of a filter.
Definition: avfilter.h:323
int height
Definition: frame.h:239
#define av_freep(p)
VASurfaceID * surface_ids
The surfaces IDs of all surfaces in the pool after creation.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2249
internal API functions
#define CS(av, va)
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:215
AVPixelFormat
Pixel format.
Definition: pixfmt.h:60
mode
Use these values in ebur128_init (or'ed).
Definition: ebur128.h:83
static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
for(j=16;j >0;--j)
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:596
static av_cold void deint_vaapi_uninit(AVFilterContext *avctx)
static const AVOption deint_vaapi_options[]