FFmpeg
vf_overlay_vaapi.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 #include <string.h>
19 
20 #include "libavutil/avassert.h"
21 #include "libavutil/mem.h"
22 #include "libavutil/opt.h"
23 #include "libavutil/pixdesc.h"
24 
25 #include "avfilter.h"
26 #include "framesync.h"
27 #include "formats.h"
28 #include "internal.h"
29 #include "vaapi_vpp.h"
30 #include "libavutil/eval.h"
31 
/**
 * Variables usable in the x/y/w/h option expressions.
 * Order must match var_names[] below, entry for entry.
 */
enum var_name {
    VAR_MAIN_IW,     VAR_MW,
    VAR_MAIN_IH,     VAR_MH,
    VAR_OVERLAY_IW,
    VAR_OVERLAY_IH,
    VAR_OVERLAY_X,   VAR_OX,
    VAR_OVERLAY_Y,   VAR_OY,
    VAR_OVERLAY_W,   VAR_OW,
    VAR_OVERLAY_H,   VAR_OH,
    VAR_VARS_NB
};
43 
44 typedef struct OverlayVAAPIContext {
45  VAAPIVPPContext vpp_ctx; /**< must be the first field */
47 
49  char *overlay_ox;
50  char *overlay_oy;
51  char *overlay_ow;
52  char *overlay_oh;
53  int ox;
54  int oy;
55  int ow;
56  int oh;
57  float alpha;
58  unsigned int blend_flags;
59  float blend_alpha;
61 
62 static const char *const var_names[] = {
63  "main_w", "W", /* input width of the main layer */
64  "main_h", "H", /* input height of the main layer */
65  "overlay_iw", /* input width of the overlay layer */
66  "overlay_ih", /* input height of the overlay layer */
67  "overlay_x", "x", /* x position of the overlay layer inside of main */
68  "overlay_y", "y", /* y position of the overlay layer inside of main */
69  "overlay_w", "w", /* output width of overlay layer */
70  "overlay_h", "h", /* output height of overlay layer */
71  NULL
72 };
73 
74 static int eval_expr(AVFilterContext *avctx)
75 {
76  OverlayVAAPIContext *ctx = avctx->priv;
77  double *var_values = ctx->var_values;
78  int ret = 0;
79  AVExpr *ox_expr = NULL, *oy_expr = NULL;
80  AVExpr *ow_expr = NULL, *oh_expr = NULL;
81 
82 #define PARSE_EXPR(e, s) {\
83  ret = av_expr_parse(&(e), s, var_names, NULL, NULL, NULL, NULL, 0, ctx); \
84  if (ret < 0) {\
85  av_log(ctx, AV_LOG_ERROR, "Error when parsing '%s'.\n", s);\
86  goto release;\
87  }\
88 }
89  PARSE_EXPR(ox_expr, ctx->overlay_ox)
90  PARSE_EXPR(oy_expr, ctx->overlay_oy)
91  PARSE_EXPR(ow_expr, ctx->overlay_ow)
92  PARSE_EXPR(oh_expr, ctx->overlay_oh)
93 #undef PASS_EXPR
94 
95  var_values[VAR_OVERLAY_W] =
96  var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
97  var_values[VAR_OVERLAY_H] =
98  var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
99 
100  /* calc again in case ow is relative to oh */
101  var_values[VAR_OVERLAY_W] =
102  var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
103 
104  var_values[VAR_OVERLAY_X] =
105  var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
106  var_values[VAR_OVERLAY_Y] =
107  var_values[VAR_OY] = av_expr_eval(oy_expr, var_values, NULL);
108 
109  /* calc again in case ox is relative to oy */
110  var_values[VAR_OVERLAY_X] =
111  var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
112 
113  /* calc overlay_w and overlay_h again incase relative to ox,oy */
114  var_values[VAR_OVERLAY_W] =
115  var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
116  var_values[VAR_OVERLAY_H] =
117  var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
118  var_values[VAR_OVERLAY_W] =
119  var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
120 
121 release:
122  av_expr_free(ox_expr);
123  av_expr_free(oy_expr);
124  av_expr_free(ow_expr);
125  av_expr_free(oh_expr);
126 
127  return ret;
128 }
129 
131 {
132  VAAPIVPPContext *vpp_ctx = avctx->priv;
133  VAStatus vas;
134  int support_flag;
135  VAProcPipelineCaps pipeline_caps;
136 
137  memset(&pipeline_caps, 0, sizeof(pipeline_caps));
138  vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
139  vpp_ctx->va_context,
140  NULL, 0,
141  &pipeline_caps);
142  if (vas != VA_STATUS_SUCCESS) {
143  av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
144  "caps: %d (%s).\n", vas, vaErrorStr(vas));
145  return AVERROR(EIO);
146  }
147 
148  if (!pipeline_caps.blend_flags) {
149  av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support overlay\n");
150  return AVERROR(EINVAL);
151  }
152 
153  support_flag = pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA;
154  if (!support_flag) {
155  av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support global alpha blending\n");
156  return AVERROR(EINVAL);
157  }
158 
159  return 0;
160 }
161 
163  VAProcPipelineParameterBuffer *params,
164  VAProcPipelineParameterBuffer *subpic_params,
166 {
167  VAAPIVPPContext *ctx = avctx->priv;
168  VASurfaceID output_surface;
169  VABufferID params_id;
170  VABufferID subpic_params_id;
171  VAStatus vas;
172  int err = 0;
173 
174  output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
175 
176  vas = vaBeginPicture(ctx->hwctx->display,
177  ctx->va_context, output_surface);
178  if (vas != VA_STATUS_SUCCESS) {
179  av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
180  "%d (%s).\n", vas, vaErrorStr(vas));
181  err = AVERROR(EIO);
182  goto fail;
183  }
184 
185  vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
186  VAProcPipelineParameterBufferType,
187  sizeof(*params), 1, params, &params_id);
188  if (vas != VA_STATUS_SUCCESS) {
189  av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
190  "%d (%s).\n", vas, vaErrorStr(vas));
191  err = AVERROR(EIO);
192  goto fail_after_begin;
193  }
194  av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
195  params_id);
196 
197 
198  vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
199  &params_id, 1);
200  if (vas != VA_STATUS_SUCCESS) {
201  av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
202  "%d (%s).\n", vas, vaErrorStr(vas));
203  err = AVERROR(EIO);
204  goto fail_after_begin;
205  }
206 
207  if (subpic_params) {
208  vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
209  VAProcPipelineParameterBufferType,
210  sizeof(*subpic_params), 1, subpic_params, &subpic_params_id);
211  if (vas != VA_STATUS_SUCCESS) {
212  av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
213  "%d (%s).\n", vas, vaErrorStr(vas));
214  err = AVERROR(EIO);
215  goto fail_after_begin;
216  }
217  av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer is %#x.\n",
218  subpic_params_id);
219 
220  vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
221  &subpic_params_id, 1);
222  if (vas != VA_STATUS_SUCCESS) {
223  av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter buffer: "
224  "%d (%s).\n", vas, vaErrorStr(vas));
225  err = AVERROR(EIO);
226  goto fail_after_begin;
227  }
228  }
229 
230  vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
231  if (vas != VA_STATUS_SUCCESS) {
232  av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
233  "%d (%s).\n", vas, vaErrorStr(vas));
234  err = AVERROR(EIO);
235  goto fail_after_render;
236  }
237 
238  if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
240  vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
241  if (vas != VA_STATUS_SUCCESS) {
242  av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
243  "%d (%s).\n", vas, vaErrorStr(vas));
244  // And ignore.
245  }
246  }
247 
248  return 0;
249 
250  // We want to make sure that if vaBeginPicture has been called, we also
251  // call vaRenderPicture and vaEndPicture. These calls may well fail or
252  // do something else nasty, but once we're in this failure case there
253  // isn't much else we can do.
254 fail_after_begin:
255  vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
256 fail_after_render:
257  vaEndPicture(ctx->hwctx->display, ctx->va_context);
258 fail:
259  return err;
260 }
261 
263 {
264  AVFilterContext *avctx = fs->parent;
265  AVFilterLink *outlink = avctx->outputs[0];
266  OverlayVAAPIContext *ctx = avctx->priv;
267  VAAPIVPPContext *vpp_ctx = avctx->priv;
268  AVFrame *input_main, *input_overlay;
269  AVFrame *output;
270  VAProcPipelineParameterBuffer params, subpic_params;
271  VABlendState blend_state = { 0 }; /**< Blend State */
272  VARectangle overlay_region, output_region;
273  int err;
274 
275  err = ff_framesync_get_frame(fs, 0, &input_main, 0);
276  if (err < 0)
277  return err;
278  err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
279  if (err < 0)
280  return err;
281 
282  av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
283  av_get_pix_fmt_name(input_main->format),
284  input_main->width, input_main->height, input_main->pts);
285 
286  if (vpp_ctx->va_context == VA_INVALID_ID)
287  return AVERROR(EINVAL);
288 
289  output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
290  if (!output) {
291  err = AVERROR(ENOMEM);
292  goto fail;
293  }
294 
295  err = av_frame_copy_props(output, input_main);
296  if (err < 0)
297  goto fail;
298 
299  err = ff_vaapi_vpp_init_params(avctx, &params,
300  input_main, output);
301  if (err < 0)
302  goto fail;
303 
304  output_region = (VARectangle) {
305  .x = 0,
306  .y = 0,
307  .width = output->width,
308  .height = output->height,
309  };
310 
311  params.filters = &vpp_ctx->filter_buffers[0];
312  params.num_filters = vpp_ctx->nb_filter_buffers;
313 
314  params.output_region = &output_region;
315  params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;
316 
317  if (input_overlay) {
318  av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
319  av_get_pix_fmt_name(input_overlay->format),
320  input_overlay->width, input_overlay->height, input_overlay->pts);
321 
322  overlay_region = (VARectangle) {
323  .x = ctx->ox,
324  .y = ctx->oy,
325  .width = ctx->ow ? ctx->ow : input_overlay->width,
326  .height = ctx->oh ? ctx->oh : input_overlay->height,
327  };
328 
329  if (overlay_region.x + overlay_region.width > input_main->width ||
330  overlay_region.y + overlay_region.height > input_main->height) {
332  "The overlay image exceeds the scope of the main image, "
333  "will crop the overlay image according based on the main image.\n");
334  }
335 
336  memcpy(&subpic_params, &params, sizeof(subpic_params));
337 
338  blend_state.flags = ctx->blend_flags;
339  blend_state.global_alpha = ctx->blend_alpha;
340  subpic_params.blend_state = &blend_state;
341 
342  subpic_params.surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
343  subpic_params.output_region = &overlay_region;
344  }
345 
346  err = overlay_vaapi_render_picture(avctx, &params, input_overlay ? &subpic_params : NULL, output);
347  if (err < 0)
348  goto fail;
349 
350  av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
351  av_get_pix_fmt_name(output->format),
352  output->width, output->height, output->pts);
353 
354  return ff_filter_frame(outlink, output);
355 
356 fail:
358  return err;
359 }
360 
362 {
364  const AVPixFmtDescriptor *desc;
365  AVHWFramesContext *fctx;
366 
367  if (link->format == AV_PIX_FMT_VAAPI) {
369  pix_fmt = fctx->sw_format;
370  }
371 
373  if (!desc)
374  return 0;
375 
376  return !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
377 }
378 
380 {
381  AVFilterContext *avctx = inlink->dst;
382  OverlayVAAPIContext *ctx = avctx->priv;
383 
384  ctx->var_values[VAR_MAIN_IW] =
385  ctx->var_values[VAR_MW] = inlink->w;
386  ctx->var_values[VAR_MAIN_IH] =
387  ctx->var_values[VAR_MH] = inlink->h;
388 
390 }
391 
393 {
394  AVFilterContext *avctx = inlink->dst;
395  OverlayVAAPIContext *ctx = avctx->priv;
396  int ret;
397 
398  ctx->var_values[VAR_OVERLAY_IW] = inlink->w;
399  ctx->var_values[VAR_OVERLAY_IH] = inlink->h;
400 
401  ret = eval_expr(avctx);
402  if (ret < 0)
403  return ret;
404 
405  ctx->ox = (int)ctx->var_values[VAR_OX];
406  ctx->oy = (int)ctx->var_values[VAR_OY];
407  ctx->ow = (int)ctx->var_values[VAR_OW];
408  ctx->oh = (int)ctx->var_values[VAR_OH];
409 
410  ctx->blend_flags = 0;
411  ctx->blend_alpha = 1.0f;
412 
413  if (ctx->alpha < 1.0f) {
414  ctx->blend_flags |= VA_BLEND_GLOBAL_ALPHA;
415  ctx->blend_alpha = ctx->alpha;
416  }
417 
419  ctx->blend_flags |= VA_BLEND_PREMULTIPLIED_ALPHA;
420 
421  return 0;
422 }
423 
425 {
426  AVFilterContext *avctx = outlink->src;
427  OverlayVAAPIContext *ctx = avctx->priv;
428  VAAPIVPPContext *vpp_ctx = avctx->priv;
429  int err;
430 
431  outlink->time_base = avctx->inputs[0]->time_base;
432  vpp_ctx->output_width = avctx->inputs[0]->w;
433  vpp_ctx->output_height = avctx->inputs[0]->h;
434 
435  err = ff_vaapi_vpp_config_output(outlink);
436  if (err < 0)
437  return err;
438 
440  if (err < 0)
441  return err;
442 
443  err = ff_framesync_init_dualinput(&ctx->fs, avctx);
444  if (err < 0)
445  return err;
446 
447  ctx->fs.on_event = overlay_vaapi_blend;
448  ctx->fs.time_base = outlink->time_base;
449 
450  return ff_framesync_configure(&ctx->fs);
451 }
452 
454 {
455  VAAPIVPPContext *vpp_ctx = avctx->priv;
456 
457  ff_vaapi_vpp_ctx_init(avctx);
458  vpp_ctx->output_format = AV_PIX_FMT_NONE;
459 
460  return 0;
461 }
462 
464 {
465  OverlayVAAPIContext *ctx = avctx->priv;
466 
467  return ff_framesync_activate(&ctx->fs);
468 }
469 
471 {
472  OverlayVAAPIContext *ctx = avctx->priv;
473 
474  ff_framesync_uninit(&ctx->fs);
476 }
477 
478 #define OFFSET(x) offsetof(OverlayVAAPIContext, x)
479 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
480 static const AVOption overlay_vaapi_options[] = {
481  { "x", "Overlay x position", OFFSET(overlay_ox), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
482  { "y", "Overlay y position", OFFSET(overlay_oy), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
483  { "w", "Overlay width", OFFSET(overlay_ow), AV_OPT_TYPE_STRING, { .str="overlay_iw"}, 0, 255, .flags = FLAGS},
484  { "h", "Overlay height", OFFSET(overlay_oh), AV_OPT_TYPE_STRING, { .str="overlay_ih*w/overlay_iw"}, 0, 255, .flags = FLAGS},
485  { "alpha", "Overlay global alpha", OFFSET(alpha), AV_OPT_TYPE_FLOAT, { .dbl = 1.0 }, 0.0, 1.0, .flags = FLAGS },
486  { "eof_action", "Action to take when encountering EOF from secondary input ",
487  OFFSET(fs.opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
488  EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
489  { "repeat", "Repeat the previous frame.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
490  { "endall", "End both streams.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
491  { "pass", "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS }, .flags = FLAGS, "eof_action" },
492  { "shortest", "force termination when the shortest input terminates", OFFSET(fs.opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
493  { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(fs.opt_repeatlast), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
494  { NULL },
495 };
496 
498 
500  {
501  .name = "main",
502  .type = AVMEDIA_TYPE_VIDEO,
503  .config_props = overlay_vaapi_config_input_main,
504  },
505  {
506  .name = "overlay",
507  .type = AVMEDIA_TYPE_VIDEO,
508  .config_props = overlay_vaapi_config_input_overlay,
509  },
510 };
511 
513  {
514  .name = "default",
515  .type = AVMEDIA_TYPE_VIDEO,
516  .config_props = &overlay_vaapi_config_output,
517  },
518 };
519 
521  .name = "overlay_vaapi",
522  .description = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
523  .priv_size = sizeof(OverlayVAAPIContext),
524  .priv_class = &overlay_vaapi_class,
528  .preinit = overlay_vaapi_framesync_preinit,
532  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
533 };
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:101
ff_vaapi_vpp_ctx_init
void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)
Definition: vaapi_vpp.c:666
OFFSET
#define OFFSET(x)
Definition: vf_overlay_vaapi.c:478
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:134
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
OverlayVAAPIContext::alpha
float alpha
Definition: vf_overlay_vaapi.c:57
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
OverlayVAAPIContext::overlay_ox
char * overlay_ox
Definition: vf_overlay_vaapi.c:49
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:304
FLAGS
#define FLAGS
Definition: vf_overlay_vaapi.c:479
FF_FILTER_FLAG_HWFRAME_AWARE
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: internal.h:370
OverlayVAAPIContext::var_values
double var_values[VAR_VARS_NB]
Definition: vf_overlay_vaapi.c:48
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1009
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2858
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
OverlayVAAPIContext::vpp_ctx
VAAPIVPPContext vpp_ctx
must be the first field
Definition: vf_overlay_vaapi.c:45
ff_framesync_get_frame
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe, unsigned get)
Get the current frame in an input.
Definition: framesync.c:267
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:225
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:116
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:432
AVFrame::width
int width
Definition: frame.h:397
overlay_vaapi_options
static const AVOption overlay_vaapi_options[]
Definition: vf_overlay_vaapi.c:480
AVOption
AVOption.
Definition: opt.h:251
EOF_ACTION_ENDALL
@ EOF_ACTION_ENDALL
Definition: framesync.h:28
overlay_vaapi_outputs
static const AVFilterPad overlay_vaapi_outputs[]
Definition: vf_overlay_vaapi.c:512
VAR_OVERLAY_IW
@ VAR_OVERLAY_IW
Definition: vf_overlay_vaapi.c:35
AVVAAPIDeviceContext::display
VADisplay display
The VADisplay handle, to be filled by the user.
Definition: hwcontext_vaapi.h:72
preinit
static av_cold int preinit(AVFilterContext *ctx)
Definition: af_aresample.c:46
var_names
static const char *const var_names[]
Definition: vf_overlay_vaapi.c:62
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:175
FFFrameSync
Frame sync structure.
Definition: framesync.h:168
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:346
overlay_vaapi_build_filter_params
static int overlay_vaapi_build_filter_params(AVFilterContext *avctx)
Definition: vf_overlay_vaapi.c:130
formats.h
OverlayVAAPIContext::ow
int ow
Definition: vf_overlay_vaapi.c:55
eval_expr
static int eval_expr(AVFilterContext *avctx)
Definition: vf_overlay_vaapi.c:74
VAR_OVERLAY_X
@ VAR_OVERLAY_X
Definition: vf_overlay_vaapi.c:37
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:430
fail
#define fail()
Definition: checkasm.h:134
overlay_vaapi_config_input_overlay
static int overlay_vaapi_config_input_overlay(AVFilterLink *inlink)
Definition: vf_overlay_vaapi.c:392
PARSE_EXPR
#define PARSE_EXPR(e, s)
overlay_vaapi_init
static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
Definition: vf_overlay_vaapi.c:453
OverlayVAAPIContext::ox
int ox
Definition: vf_overlay_vaapi.c:53
av_expr_free
void av_expr_free(AVExpr *e)
Free a parsed expression previously created with av_expr_parse().
Definition: eval.c:336
OverlayVAAPIContext::blend_flags
unsigned int blend_flags
Definition: vf_overlay_vaapi.c:58
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:49
ff_vf_overlay_vaapi
const AVFilter ff_vf_overlay_vaapi
Definition: vf_overlay_vaapi.c:520
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
av_cold
#define av_cold
Definition: attributes.h:90
OverlayVAAPIContext::blend_alpha
float blend_alpha
Definition: vf_overlay_vaapi.c:59
overlay_vaapi_inputs
static const AVFilterPad overlay_vaapi_inputs[]
Definition: vf_overlay_vaapi.c:499
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts_bsf.c:363
OverlayVAAPIContext::oh
int oh
Definition: vf_overlay_vaapi.c:56
overlay_vaapi_config_input_main
static int overlay_vaapi_config_input_main(AVFilterLink *inlink)
Definition: vf_overlay_vaapi.c:379
var_name
var_name
Definition: noise_bsf.c:46
VAAPIVPPContext::output_width
int output_width
Definition: vaapi_vpp.h:48
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
AV_PIX_FMT_FLAG_ALPHA
#define AV_PIX_FMT_FLAG_ALPHA
The pixel format has an alpha channel.
Definition: pixdesc.h:147
ctx
AVFormatContext * ctx
Definition: movenc.c:48
VAAPIVPPContext::output_format
enum AVPixelFormat output_format
Definition: vaapi_vpp.h:47
av_expr_eval
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque)
Evaluate a previously parsed expression.
Definition: eval.c:766
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:41
AVExpr
Definition: eval.c:157
VAR_MAIN_IH
@ VAR_MAIN_IH
Definition: vf_overlay_vaapi.c:34
VAAPIVPPContext::hwctx
AVVAAPIDeviceContext * hwctx
Definition: vaapi_vpp.h:36
EOF_ACTION_PASS
@ EOF_ACTION_PASS
Definition: framesync.h:29
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:190
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
overlay_vaapi_config_output
static int overlay_vaapi_config_output(AVFilterLink *outlink)
Definition: vf_overlay_vaapi.c:424
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:222
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:603
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:258
VAR_OX
@ VAR_OX
Definition: vf_overlay_vaapi.c:37
VAR_OVERLAY_H
@ VAR_OVERLAY_H
Definition: vf_overlay_vaapi.c:40
VAR_OW
@ VAR_OW
Definition: vf_overlay_vaapi.c:39
activate
filter_frame For filters that do not use the activate() callback
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:423
ff_vaapi_vpp_config_input
int ff_vaapi_vpp_config_input(AVFilterLink *inlink)
Definition: vaapi_vpp.c:70
OverlayVAAPIContext
Definition: vf_overlay_vaapi.c:44
ff_vaapi_vpp_ctx_uninit
void ff_vaapi_vpp_ctx_uninit(AVFilterContext *avctx)
Definition: vaapi_vpp.c:680
eval.h
vaapi_vpp.h
VAR_OVERLAY_W
@ VAR_OVERLAY_W
Definition: vf_overlay_vaapi.c:39
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:115
ff_framesync_init_dualinput
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
Initialize a frame sync structure for dualinput.
Definition: framesync.c:372
VAR_MAIN_IW
@ VAR_MAIN_IW
Definition: vf_overlay_vaapi.c:33
overlay_vaapi_render_picture
static int overlay_vaapi_render_picture(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, VAProcPipelineParameterBuffer *subpic_params, AVFrame *output_frame)
Definition: vf_overlay_vaapi.c:162
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:412
overlay_vaapi_uninit
static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
Definition: vf_overlay_vaapi.c:470
output_frame
static int output_frame(H264Context *h, AVFrame *dst, H264Picture *srcp)
Definition: h264dec.c:844
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:119
VAR_MW
@ VAR_MW
Definition: vf_overlay_vaapi.c:33
AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS
@ AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS
The driver does not destroy parameter buffers when they are used by vaRenderPicture().
Definition: hwcontext_vaapi.h:47
internal.h
AV_OPT_TYPE_FLOAT
@ AV_OPT_TYPE_FLOAT
Definition: opt.h:228
FILTER_SINGLE_PIXFMT
#define FILTER_SINGLE_PIXFMT(pix_fmt_)
Definition: internal.h:180
VAR_OVERLAY_IH
@ VAR_OVERLAY_IH
Definition: vf_overlay_vaapi.c:36
VAAPIVPPContext::output_height
int output_height
Definition: vaapi_vpp.h:49
VAR_MH
@ VAR_MH
Definition: vf_overlay_vaapi.c:34
VAAPIVPPContext::filter_buffers
VABufferID filter_buffers[VAProcFilterCount]
Definition: vaapi_vpp.h:51
VAR_OVERLAY_Y
@ VAR_OVERLAY_Y
Definition: vf_overlay_vaapi.c:38
VAR_VARS_NB
@ VAR_VARS_NB
Definition: vf_overlay_vaapi.c:41
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:55
OverlayVAAPIContext::oy
int oy
Definition: vf_overlay_vaapi.c:54
VAR_OY
@ VAR_OY
Definition: vf_overlay_vaapi.c:38
AVFilter
Filter definition.
Definition: avfilter.h:171
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:124
ret
ret
Definition: filter_design.txt:187
VAAPIVPPContext
Definition: vaapi_vpp.h:33
OverlayVAAPIContext::overlay_oy
char * overlay_oy
Definition: vf_overlay_vaapi.c:50
VAAPIVPPContext::va_context
VAContextID va_context
Definition: vaapi_vpp.h:41
EOF_ACTION_REPEAT
@ EOF_ACTION_REPEAT
Definition: framesync.h:27
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:664
AVFrame::height
int height
Definition: frame.h:397
framesync.h
ff_vaapi_vpp_config_output
int ff_vaapi_vpp_config_output(AVFilterLink *outlink)
Definition: vaapi_vpp.c:95
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:225
avfilter.h
overlay_vaapi_activate
static int overlay_vaapi_activate(AVFilterContext *avctx)
Definition: vf_overlay_vaapi.c:463
AVFilterContext
An instance of a filter.
Definition: avfilter.h:415
VAAPIVPPContext::nb_filter_buffers
int nb_filter_buffers
Definition: vaapi_vpp.h:52
desc
const char * desc
Definition: libsvtav1.c:83
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
VAR_OH
@ VAR_OH
Definition: vf_overlay_vaapi.c:40
mem.h
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
alpha
static const int16_t alpha[]
Definition: ilbcdata.h:55
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:244
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:191
FRAMESYNC_DEFINE_CLASS
FRAMESYNC_DEFINE_CLASS(overlay_vaapi, OverlayVAAPIContext, fs)
OverlayVAAPIContext::overlay_oh
char * overlay_oh
Definition: vf_overlay_vaapi.c:52
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
have_alpha_planar
static int have_alpha_planar(AVFilterLink *link)
Definition: vf_overlay_vaapi.c:361
overlay_vaapi_blend
static int overlay_vaapi_blend(FFFrameSync *fs)
Definition: vf_overlay_vaapi.c:262
uninit
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:285
OverlayVAAPIContext::overlay_ow
char * overlay_ow
Definition: vf_overlay_vaapi.c:51
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:355
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:229
VAAPI_VPP_BACKGROUND_BLACK
#define VAAPI_VPP_BACKGROUND_BLACK
Definition: vaapi_vpp.h:31
int
int
Definition: ffmpeg_filter.c:156
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:234
OverlayVAAPIContext::fs
FFFrameSync fs
Definition: vf_overlay_vaapi.c:46
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:2778
ff_vaapi_vpp_init_params
int ff_vaapi_vpp_init_params(AVFilterContext *avctx, VAProcPipelineParameterBuffer *params, const AVFrame *input_frame, AVFrame *output_frame)
Definition: vaapi_vpp.c:515
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:427