vf_deinterlace_vaapi.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
#include "vaapi_vpp.h"

#define MAX_REFERENCES 8

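// State for one filter instance.  The common VAAPIVPPContext must come first;
// the deinterlace-specific fields hold the selected mode, the rate/auto
// options, the driver capabilities and a small queue of input frames that are
// kept alive to serve as forward/backward references.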
typedef struct DeintVAAPIContext {
    VAAPIVPPContext vpp_ctx; // must be the first field

    int                mode;
    int                field_rate;
    int                auto_enable;

    VAProcFilterCapDeinterlacing
                       deint_caps[VAProcDeinterlacingCount];
    int             nb_deint_caps;
    VAProcPipelineCaps pipeline_caps;

    int                queue_depth;
    int                queue_count;
    AVFrame           *frame_queue[MAX_REFERENCES];
    int                extra_delay_for_timestamps;
} DeintVAAPIContext;

static const char *deint_vaapi_mode_name(int mode)
{
    switch (mode) {
#define D(name) case VAProcDeinterlacing ## name: return #name
        D(Bob);
        D(Weave);
        D(MotionAdaptive);
        D(MotionCompensated);
#undef D
    default:
        return "Invalid";
    }
}

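// Pipeline teardown: drop any queued reference frames, then hand off to the
// common VAAPI VPP uninit.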
static void deint_vaapi_pipeline_uninit(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    int i;

    for (i = 0; i < ctx->queue_count; i++)
        av_frame_free(&ctx->frame_queue[i]);
    ctx->queue_count = 0;

    ff_vaapi_vpp_pipeline_uninit(avctx);
}

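// Called by the common VPP code once the hardware context is available.
// Queries the driver's deinterlacing capabilities, resolves the requested
// mode (or picks the highest-numbered supported one for "default"), creates
// the deinterlacing filter parameter buffer, and queries the pipeline caps to
// find out how many forward/backward reference surfaces are required.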
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    DeintVAAPIContext *ctx   = avctx->priv;
    VAStatus vas;
    VAProcFilterParameterBufferDeinterlacing params;
    int i;

    ctx->nb_deint_caps = VAProcDeinterlacingCount;
    vas = vaQueryVideoProcFilterCaps(vpp_ctx->hwctx->display,
                                     vpp_ctx->va_context,
                                     VAProcFilterDeinterlacing,
                                     &ctx->deint_caps,
                                     &ctx->nb_deint_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (ctx->mode == VAProcDeinterlacingNone) {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type > ctx->mode)
                ctx->mode = ctx->deint_caps[i].type;
        }
        av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
               "deinterlacing mode.\n", ctx->mode,
               deint_vaapi_mode_name(ctx->mode));
    } else {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type == ctx->mode)
                break;
        }
        if (i >= ctx->nb_deint_caps) {
            av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
                   "not supported.\n", ctx->mode,
                   deint_vaapi_mode_name(ctx->mode));
            return AVERROR(EINVAL);
        }
    }

    params.type      = VAProcFilterDeinterlacing;
    params.algorithm = ctx->mode;
    params.flags     = 0;

    vas = ff_vaapi_vpp_make_param_buffers(avctx,
                                          VAProcFilterParameterBufferType,
                                          &params,
                                          sizeof(params),
                                          1);
    if (vas)
        return vas;

    vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
                                       vpp_ctx->va_context,
                                       &vpp_ctx->filter_buffers[0], 1,
                                       &ctx->pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    ctx->extra_delay_for_timestamps = ctx->field_rate == 2 &&
        ctx->pipeline_caps.num_backward_references == 0;

    ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
                       ctx->pipeline_caps.num_forward_references +
                       ctx->extra_delay_for_timestamps + 1;
    if (ctx->queue_depth > MAX_REFERENCES) {
        av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
               "references (%u forward, %u back).\n",
               ctx->pipeline_caps.num_forward_references,
               ctx->pipeline_caps.num_backward_references);
        return AVERROR(ENOSYS);
    }

    return 0;
}

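// Output link setup: field-rate output (rate=field) produces two frames per
// input frame, so the output time base is halved and the frame rate doubled.
// For example, 25 fps input with a 1/25 time base becomes 50 fps output with
// a 1/50 time base when field_rate == 2.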
static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterLink *inlink = outlink->src->inputs[0];
    AVFilterContext *avctx = outlink->src;
    DeintVAAPIContext *ctx = avctx->priv;
    int err;

    err = ff_vaapi_vpp_config_output(outlink);
    if (err < 0)
        return err;
    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    return 0;
}

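// Per-frame processing.  Incoming frames are held in a small queue until
// enough surfaces are available to satisfy the pipeline's forward/backward
// reference requirements; only then is the frame at index
// num_forward_references in the queue actually deinterlaced.  With
// rate=field, that frame is rendered twice, once per field.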
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    DeintVAAPIContext *ctx   = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VARectangle input_region;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i, field, current_frame_index;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->queue_count < ctx->queue_depth) {
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

    current_frame_index = ctx->pipeline_caps.num_forward_references;

    input_frame = ctx->frame_queue[current_frame_index];
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index - i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index + i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");

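    // Render one output frame per field (field_rate == 2) or a single frame
    // (field_rate == 1).  The field-order flags handed to the driver follow
    // top_field_first on the input frame; with auto=1, frames not marked as
    // interlaced are sent through the VPP with no deinterlacing filter
    // attached.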
    for (field = 0; field < ctx->field_rate; field++) {
        output_frame = ff_get_video_buffer(outlink, vpp_ctx->output_width,
                                           vpp_ctx->output_height);
        if (!output_frame) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
        av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
               "deinterlace output.\n", output_surface);

        memset(&params, 0, sizeof(params));

        input_region = (VARectangle) {
            .x      = 0,
            .y      = 0,
            .width  = input_frame->width,
            .height = input_frame->height,
        };

        params.surface = input_surface;
        params.surface_region = &input_region;
        params.surface_color_standard =
            ff_vaapi_vpp_colour_standard(input_frame->colorspace);

        params.output_region = NULL;
        params.output_background_color = 0xff000000;
        params.output_color_standard = params.surface_color_standard;

        params.pipeline_flags = 0;
        params.filter_flags   = VA_FRAME_PICTURE;

        if (!ctx->auto_enable || input_frame->interlaced_frame) {
            vas = vaMapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0],
                              &filter_params_addr);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
            filter_params = filter_params_addr;
            filter_params->flags = 0;
            if (input_frame->top_field_first) {
                filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
            } else {
                filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
                filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
            }
            filter_params_addr = NULL;
            vas = vaUnmapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0]);
            if (vas != VA_STATUS_SUCCESS)
                av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));

            params.filters     = &vpp_ctx->filter_buffers[0];
            params.num_filters = 1;

            params.forward_references = forward_references;
            params.num_forward_references =
                ctx->pipeline_caps.num_forward_references;
            params.backward_references = backward_references;
            params.num_backward_references =
                ctx->pipeline_caps.num_backward_references;

        } else {
            params.filters     = NULL;
            params.num_filters = 0;
        }

        err = ff_vaapi_vpp_render_picture(avctx, &params, output_surface);
        if (err < 0)
            goto fail;

        err = av_frame_copy_props(output_frame, input_frame);
        if (err < 0)
            goto fail;

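        // Timestamps for field-rate output: the output time base is half the
        // input one, so the first field keeps the source time (2 * pts) and
        // the second lands midway to the next queued frame (pts + next_pts
        // expressed in the doubled-resolution time base).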
        if (ctx->field_rate == 2) {
            if (field == 0)
                output_frame->pts = 2 * input_frame->pts;
            else
                output_frame->pts = input_frame->pts +
                    ctx->frame_queue[current_frame_index + 1]->pts;
        }
        output_frame->interlaced_frame = 0;

        av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
               av_get_pix_fmt_name(output_frame->format),
               output_frame->width, output_frame->height, output_frame->pts);

        err = ff_filter_frame(outlink, output_frame);
        if (err < 0)
            break;
    }

    return err;

fail:
    if (filter_params_addr)
        vaUnmapBuffer(vpp_ctx->hwctx->display, vpp_ctx->filter_buffers[0]);
    av_frame_free(&output_frame);
    return err;
}

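// Filter init: wire the deinterlace-specific callbacks into the shared VAAPI
// VPP context.  The output format is left as AV_PIX_FMT_NONE, which the
// common VPP code treats as "same as the input".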
static av_cold int deint_vaapi_init(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;

    ff_vaapi_vpp_ctx_init(avctx);
    vpp_ctx->pipeline_uninit     = deint_vaapi_pipeline_uninit;
    vpp_ctx->build_filter_params = deint_vaapi_build_filter_params;
    vpp_ctx->output_format       = AV_PIX_FMT_NONE;

    return 0;
}

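// Option table for the filter.  One illustrative invocation (the device path,
// input file and encoder below are placeholders; adjust for the actual
// system):
//
//     ffmpeg -hwaccel vaapi -hwaccel_device /dev/dri/renderD128 \
//            -hwaccel_output_format vaapi -i input.ts \
//            -vf deinterlace_vaapi=mode=motion_adaptive:rate=field \
//            -c:v h264_vaapi output.mp4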
#define OFFSET(x) offsetof(DeintVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption deint_vaapi_options[] = {
    { "mode", "Deinterlacing mode",
      OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
      VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
    { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, 0, 0, FLAGS, "mode" },
    { "bob", "Use the bob deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, 0, 0, FLAGS, "mode" },
    { "weave", "Use the weave deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, 0, 0, FLAGS, "mode" },
    { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, 0, 0, FLAGS, "mode" },
    { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, 0, 0, FLAGS, "mode" },

    { "rate", "Generate output at frame rate or field rate",
      OFFSET(field_rate), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 2, FLAGS, "rate" },
    { "frame", "Output at frame rate (one frame of output for each field-pair)",
      0, AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, FLAGS, "rate" },
    { "field", "Output at field rate (one frame of output for each field)",
      0, AV_OPT_TYPE_CONST, { .i64 = 2 }, 0, 0, FLAGS, "rate" },

    { "auto", "Only deinterlace fields, passing frames through unchanged",
      OFFSET(auto_enable), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },

    { NULL },
};

static const AVClass deint_vaapi_class = {
    .class_name = "deinterlace_vaapi",
    .item_name  = av_default_item_name,
    .option     = deint_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad deint_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &deint_vaapi_filter_frame,
        .config_props = &ff_vaapi_vpp_config_input,
    },
    { NULL }
};

static const AVFilterPad deint_vaapi_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &deint_vaapi_config_output,
    },
    { NULL }
};

AVFilter ff_vf_deinterlace_vaapi = {
    .name           = "deinterlace_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
    .priv_size      = sizeof(DeintVAAPIContext),
    .init           = &deint_vaapi_init,
    .uninit         = &ff_vaapi_vpp_ctx_uninit,
    .query_formats  = &ff_vaapi_vpp_query_formats,
    .inputs         = deint_vaapi_inputs,
    .outputs        = deint_vaapi_outputs,
    .priv_class     = &deint_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};