FFmpeg
vf_scale_npp.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * scale video filter
22  */
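As an editorial aid to this listing (not part of the upstream file), a typical command-line invocation of the filter implemented here might look as follows; the option names are taken from the options[] table near the end of the file, and availability depends on a CUDA/libnpp-enabled build.

/*
 * Illustrative usage (hypothetical command line, not from the original source):
 *
 *   ffmpeg -hwaccel cuda -hwaccel_output_format cuda -i input.mp4 \
 *          -vf scale_npp=w=1280:h=-1:interp_algo=lanczos -c:v h264_nvenc output.mp4
 *
 * w=1280:h=-1 scales to 1280 pixels wide while keeping the original aspect
 * ratio; interp_algo selects one of the NPP interpolation modes listed in
 * options[] below.
 */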
23 
24 #include <nppi.h>
25 #include <stdio.h>
26 #include <string.h>
27 
28 #include "libavutil/hwcontext.h"
29 #include "libavutil/hwcontext_cuda_internal.h"
30 #include "libavutil/cuda_check.h"
31 #include "libavutil/internal.h"
32 #include "libavutil/mem.h"
33 #include "libavutil/opt.h"
34 #include "libavutil/parseutils.h"
35 #include "libavutil/eval.h"
36 #include "libavutil/pixdesc.h"
37 
38 #include "avfilter.h"
39 #include "filters.h"
40 #include "formats.h"
41 #include "scale_eval.h"
42 #include "video.h"
43 
44 #define CHECK_CU(x) FF_CUDA_CHECK_DL(ctx, device_hwctx->internal->cuda_dl, x)
45 
46 static const enum AVPixelFormat supported_formats[] = {
47  AV_PIX_FMT_YUV420P,
48  AV_PIX_FMT_NV12,
49  AV_PIX_FMT_YUV444P,
50  AV_PIX_FMT_YUVA420P,
51 };
52 
53 static const enum AVPixelFormat deinterleaved_formats[][2] = {
54  { AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P },
55 };
56 
57 enum ScaleStage {
58  STAGE_DEINTERLEAVE,
59  STAGE_RESIZE,
60  STAGE_INTERLEAVE,
61  STAGE_NB,
62 };
63 
64 typedef struct NPPScaleStageContext {
65  int stage_needed;
66  enum AVPixelFormat in_fmt;
67  enum AVPixelFormat out_fmt;
68 
69  struct {
70  int width;
71  int height;
72  } planes_in[4], planes_out[4];
73 
74  AVBufferRef *frames_ctx;
75  AVFrame *frame;
76 } NPPScaleStageContext;
77 
78 static const char *const var_names[] = {
79  "in_w", "iw",
80  "in_h", "ih",
81  "out_w", "ow",
82  "out_h", "oh",
83  "a",
84  "sar",
85  "dar",
86  "n",
87  "t",
88 #if FF_API_FRAME_PKT
89  "pos",
90 #endif
91  "main_w",
92  "main_h",
93  "main_a",
94  "main_sar",
95  "main_dar", "mdar",
96  "main_n",
97  "main_t",
98 #if FF_API_FRAME_PKT
99  "main_pos",
100 #endif
101  NULL
102 };
103 
104 enum var_name {
105  VAR_IN_W, VAR_IW,
106  VAR_IN_H, VAR_IH,
107  VAR_OUT_W, VAR_OW,
108  VAR_OUT_H, VAR_OH,
109  VAR_A,
110  VAR_SAR,
111  VAR_DAR,
112  VAR_N,
113  VAR_T,
114 #if FF_API_FRAME_PKT
115  VAR_POS,
116 #endif
117  VAR_S2R_MAIN_W,
118  VAR_S2R_MAIN_H,
119  VAR_S2R_MAIN_A,
120  VAR_S2R_MAIN_SAR,
121  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
122  VAR_S2R_MAIN_N,
123  VAR_S2R_MAIN_T,
124 #if FF_API_FRAME_PKT
125  VAR_S2R_MAIN_POS,
126 #endif
127  VARS_NB
128 };
129 
130 enum EvalMode {
131  EVAL_MODE_INIT,
132  EVAL_MODE_FRAME,
133  EVAL_MODE_NB
134 };
135 
136 typedef struct NPPScaleContext {
137  const AVClass *class;
138 
139  NPPScaleStageContext stages[STAGE_NB];
140  AVFrame *tmp_frame;
141  int passthrough;
142 
143  int shift_width, shift_height;
144 
145  /**
146  * New dimensions. Special values are:
147  * 0 = original width/height
148  * -1 = keep original aspect
149  */
150  int w, h;
151 
152  /**
153  * Output sw format. AV_PIX_FMT_NONE for no conversion.
154  */
155  enum AVPixelFormat format;
156 
157  char *w_expr; ///< width expression string
158  char *h_expr; ///< height expression string
159  char *format_str;
160 
161  int force_original_aspect_ratio;
162  int force_divisible_by;
163 
164  int interp_algo;
165 
166  char* size_str;
167 
168  AVExpr *w_pexpr;
169  AVExpr *h_pexpr;
170 
171  double var_values[VARS_NB];
172 
173  int eval_mode;
174 } NPPScaleContext;
175 
176 const AVFilter ff_vf_scale2ref_npp;
177 
178 static int config_props(AVFilterLink *outlink);
179 
180 static int check_exprs(AVFilterContext *ctx)
181 {
182  NPPScaleContext* scale = ctx->priv;
183  unsigned vars_w[VARS_NB] = {0}, vars_h[VARS_NB] = {0};
184 
185  if (!scale->w_pexpr && !scale->h_pexpr)
186  return AVERROR(EINVAL);
187 
188  if (scale->w_pexpr)
189  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
190  if (scale->h_pexpr)
191  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
192 
193  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
194  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
195  return AVERROR(EINVAL);
196  }
197 
198  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
199  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
200  return AVERROR(EINVAL);
201  }
202 
203  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
204  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
205  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
206  }
207 
208  if (ctx->filter != &ff_vf_scale2ref_npp &&
209  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
210  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
211  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
212  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
213  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
214  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
215  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
216  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T]
217 #if FF_API_FRAME_PKT
218  || vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]
219 #endif
220  )) {
221  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref_npp variables are not valid in scale_npp filter.\n");
222  return AVERROR(EINVAL);
223  }
224 
225  if (scale->eval_mode == EVAL_MODE_INIT &&
226  (vars_w[VAR_N] || vars_h[VAR_N] ||
227  vars_w[VAR_T] || vars_h[VAR_T] ||
228 #if FF_API_FRAME_PKT
229  vars_w[VAR_POS] || vars_h[VAR_POS] ||
230 #endif
231  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
232  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T]
233 #if FF_API_FRAME_PKT
234  || vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]
235 #endif
236  ) ) {
237  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', are not valid in init eval_mode.\n");
238  return AVERROR(EINVAL);
239  }
240 
241  return 0;
242 }
243 
244 static int nppscale_parse_expr(AVFilterContext* ctx, char* str_expr,
245  AVExpr** pexpr_ptr, const char* var,
246  const char* args)
247 {
248  NPPScaleContext* scale = ctx->priv;
249  int ret, is_inited = 0;
250  char* old_str_expr = NULL;
251  AVExpr* old_pexpr = NULL;
252 
253  if (str_expr) {
254  old_str_expr = av_strdup(str_expr);
255  if (!old_str_expr)
256  return AVERROR(ENOMEM);
257  av_opt_set(scale, var, args, 0);
258  }
259 
260  if (*pexpr_ptr) {
261  old_pexpr = *pexpr_ptr;
262  *pexpr_ptr = NULL;
263  is_inited = 1;
264  }
265 
266  ret = av_expr_parse(pexpr_ptr, args, var_names, NULL, NULL, NULL, NULL, 0,
267  ctx);
268  if (ret < 0) {
269  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var,
270  args);
271  goto revert;
272  }
273 
274  ret = check_exprs(ctx);
275  if (ret < 0)
276  goto revert;
277 
278  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
279  goto revert;
280 
281  av_expr_free(old_pexpr);
282  old_pexpr = NULL;
283  av_freep(&old_str_expr);
284 
285  return 0;
286 
287 revert:
288  av_expr_free(*pexpr_ptr);
289  *pexpr_ptr = NULL;
290  if (old_str_expr) {
291  av_opt_set(scale, var, old_str_expr, 0);
292  av_free(old_str_expr);
293  }
294  if (old_pexpr)
295  *pexpr_ptr = old_pexpr;
296 
297  return ret;
298 }
299 
300 static av_cold int nppscale_init(AVFilterContext* ctx)
301 {
302  NPPScaleContext* scale = ctx->priv;
303  int i, ret;
304 
305  if (!strcmp(scale->format_str, "same")) {
306  scale->format = AV_PIX_FMT_NONE;
307  } else {
308  scale->format = av_get_pix_fmt(scale->format_str);
309  if (scale->format == AV_PIX_FMT_NONE) {
310  av_log(ctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", scale->format_str);
311  return AVERROR(EINVAL);
312  }
313  }
314 
315  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
316  av_log(ctx, AV_LOG_ERROR,
317  "Size and width/height exprs cannot be set at the same time.\n");
318  return AVERROR(EINVAL);
319  }
320 
321  if (scale->w_expr && !scale->h_expr)
322  FFSWAP(char*, scale->w_expr, scale->size_str);
323 
324  if (scale->size_str) {
325  char buf[32];
326  ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str);
327  if (0 > ret) {
328  av_log(ctx, AV_LOG_ERROR, "Invalid size '%s'\n", scale->size_str);
329  return ret;
330  }
331 
332  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
333  ret = av_opt_set(scale, "w", buf, 0);
334  if (ret < 0)
335  return ret;
336 
337  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
338  ret = av_opt_set(scale, "h", buf, 0);
339  if (ret < 0)
340  return ret;
341  }
342 
343  if (!scale->w_expr) {
344  ret = av_opt_set(scale, "w", "iw", 0);
345  if (ret < 0)
346  return ret;
347  }
348 
349  if (!scale->h_expr) {
350  ret = av_opt_set(scale, "h", "ih", 0);
351  if (ret < 0)
352  return ret;
353  }
354 
355  ret = nppscale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
356  if (ret < 0)
357  return ret;
358 
359  ret = nppscale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
360  if (ret < 0)
361  return ret;
362 
363  for (i = 0; i < FF_ARRAY_ELEMS(scale->stages); i++) {
364  scale->stages[i].frame = av_frame_alloc();
365  if (!scale->stages[i].frame)
366  return AVERROR(ENOMEM);
367  }
368  scale->tmp_frame = av_frame_alloc();
369  if (!scale->tmp_frame)
370  return AVERROR(ENOMEM);
371 
372  return 0;
373 }
374 
375 static int nppscale_eval_dimensions(AVFilterContext* ctx)
376 {
377  NPPScaleContext* scale = ctx->priv;
378  const char scale2ref = ctx->filter == &ff_vf_scale2ref_npp;
379  const AVFilterLink* inlink = ctx->inputs[scale2ref ? 1 : 0];
380  char* expr;
381  int eval_w, eval_h;
382  int ret;
383  double res;
384 
385  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
386  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
387  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
388  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
389  scale->var_values[VAR_A] = (double)inlink->w / inlink->h;
390  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
391  (double)inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
392  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
393 
394  if (scale2ref) {
395  const AVFilterLink* main_link = ctx->inputs[0];
396 
397  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
398  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
399  scale->var_values[VAR_S2R_MAIN_A] = (double)main_link->w / main_link->h;
400  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
401  (double)main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
402  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
403  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
404  }
405 
406  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
407  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int)res == 0 ? inlink->w : (int)res;
408 
409  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
410  if (isnan(res)) {
411  expr = scale->h_expr;
412  ret = AVERROR(EINVAL);
413  goto fail;
414  }
415  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int)res == 0 ? inlink->h : (int)res;
416 
417  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
418  if (isnan(res)) {
419  expr = scale->w_expr;
420  ret = AVERROR(EINVAL);
421  goto fail;
422  }
423  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int)res == 0 ? inlink->w : (int)res;
424 
425  scale->w = eval_w;
426  scale->h = eval_h;
427 
428  return 0;
429 
430 fail:
431  av_log(ctx, AV_LOG_ERROR, "Error when evaluating the expression '%s'.\n",
432  expr);
433  return ret;
434 }
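A worked example of the evaluation above (editorial note, not part of the original source), showing how the aspect-ratio variables seed the expression evaluator:

/*
 * Illustrative numbers for a 1440x1080 input with a 4:3 sample aspect ratio:
 *   var_values[VAR_A]   = 1440.0 / 1080  = 4/3
 *   var_values[VAR_SAR] = 4/3
 *   var_values[VAR_DAR] = (4/3) * (4/3)  = 16/9
 * so an expression such as "w=oh*dar" evaluates to 1280 once oh is 720,
 * i.e. it resolves against the display aspect ratio, not the stored one.
 */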
435 
436 static void nppscale_uninit(AVFilterContext *ctx)
437 {
438  NPPScaleContext *s = ctx->priv;
439  int i;
440 
441  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
442  av_frame_free(&s->stages[i].frame);
443  av_buffer_unref(&s->stages[i].frames_ctx);
444  }
445  av_frame_free(&s->tmp_frame);
446 
447  av_expr_free(s->w_pexpr);
448  av_expr_free(s->h_pexpr);
449  s->w_pexpr = s->h_pexpr = NULL;
450 }
451 
452 static int init_stage(NPPScaleStageContext *stage, AVBufferRef *device_ctx)
453 {
454  AVBufferRef *out_ref = NULL;
455  AVHWFramesContext *out_ctx;
456  int in_sw, in_sh, out_sw, out_sh;
457  int ret, i;
458 
459  av_pix_fmt_get_chroma_sub_sample(stage->in_fmt, &in_sw, &in_sh);
460  av_pix_fmt_get_chroma_sub_sample(stage->out_fmt, &out_sw, &out_sh);
461  if (!stage->planes_out[0].width) {
462  stage->planes_out[0].width = stage->planes_in[0].width;
463  stage->planes_out[0].height = stage->planes_in[0].height;
464  }
465 
466  for (i = 1; i < FF_ARRAY_ELEMS(stage->planes_in); i++) {
467  stage->planes_in[i].width = stage->planes_in[0].width >> in_sw;
468  stage->planes_in[i].height = stage->planes_in[0].height >> in_sh;
469  stage->planes_out[i].width = stage->planes_out[0].width >> out_sw;
470  stage->planes_out[i].height = stage->planes_out[0].height >> out_sh;
471  }
472 
473  if (AV_PIX_FMT_YUVA420P == stage->in_fmt) {
474  stage->planes_in[3].width = stage->planes_in[0].width;
475  stage->planes_in[3].height = stage->planes_in[0].height;
476  stage->planes_out[3].width = stage->planes_out[0].width;
477  stage->planes_out[3].height = stage->planes_out[0].height;
478  }
479 
480  out_ref = av_hwframe_ctx_alloc(device_ctx);
481  if (!out_ref)
482  return AVERROR(ENOMEM);
483  out_ctx = (AVHWFramesContext*)out_ref->data;
484 
485  out_ctx->format = AV_PIX_FMT_CUDA;
486  out_ctx->sw_format = stage->out_fmt;
487  out_ctx->width = FFALIGN(stage->planes_out[0].width, 32);
488  out_ctx->height = FFALIGN(stage->planes_out[0].height, 32);
489 
490  ret = av_hwframe_ctx_init(out_ref);
491  if (ret < 0)
492  goto fail;
493 
494  av_frame_unref(stage->frame);
495  ret = av_hwframe_get_buffer(out_ref, stage->frame, 0);
496  if (ret < 0)
497  goto fail;
498 
499  stage->frame->width = stage->planes_out[0].width;
500  stage->frame->height = stage->planes_out[0].height;
501 
502  av_buffer_unref(&stage->frames_ctx);
503  stage->frames_ctx = out_ref;
504 
505  return 0;
506 fail:
507  av_buffer_unref(&out_ref);
508  return ret;
509 }
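To make the plane bookkeeping in init_stage() concrete, here is an editorial example (not part of the source) for a YUV420P output stage:

/*
 * Illustrative plane layout for out_fmt = AV_PIX_FMT_YUV420P at 1280x720:
 *   planes_out[0] = 1280x720  (luma)
 *   planes_out[1] =  640x360  (chroma, shifted by the 1,1 returned from
 *   planes_out[2] =  640x360   av_pix_fmt_get_chroma_sub_sample())
 *   pool surface  = FFALIGN(1280,32) x FFALIGN(720,32) = 1280x736
 */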
510 
511 static int format_is_supported(enum AVPixelFormat fmt)
512 {
513  int i;
514 
515  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++)
516  if (supported_formats[i] == fmt)
517  return 1;
518  return 0;
519 }
520 
521 static enum AVPixelFormat get_deinterleaved_format(enum AVPixelFormat fmt)
522 {
523  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(fmt);
524  int i, planes;
525 
526  planes = av_pix_fmt_count_planes(fmt);
527  if (planes == desc->nb_components)
528  return fmt;
529  for (i = 0; i < FF_ARRAY_ELEMS(deinterleaved_formats); i++)
530  if (deinterleaved_formats[i][0] == fmt)
531  return deinterleaved_formats[i][1];
532  return AV_PIX_FMT_NONE;
533 }
534 
535 static int init_processing_chain(AVFilterContext *ctx, int in_width, int in_height,
536  int out_width, int out_height)
537 {
538  NPPScaleContext *s = ctx->priv;
539  FilterLink *inl = ff_filter_link(ctx->inputs[0]);
540  FilterLink *outl = ff_filter_link(ctx->outputs[0]);
541 
542  AVHWFramesContext *in_frames_ctx;
543 
544  enum AVPixelFormat in_format;
545  enum AVPixelFormat out_format;
546  enum AVPixelFormat in_deinterleaved_format;
547  enum AVPixelFormat out_deinterleaved_format;
548 
549  int i, ret, last_stage = -1;
550 
551  /* check that we have a hw context */
552  if (!inl->hw_frames_ctx) {
553  av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
554  return AVERROR(EINVAL);
555  }
556  in_frames_ctx = (AVHWFramesContext*)inl->hw_frames_ctx->data;
557  in_format = in_frames_ctx->sw_format;
558  out_format = (s->format == AV_PIX_FMT_NONE) ? in_format : s->format;
559 
560  if (!format_is_supported(in_format)) {
561  av_log(ctx, AV_LOG_ERROR, "Unsupported input format: %s\n",
562  av_get_pix_fmt_name(in_format));
563  return AVERROR(ENOSYS);
564  }
565  if (!format_is_supported(out_format)) {
566  av_log(ctx, AV_LOG_ERROR, "Unsupported output format: %s\n",
567  av_get_pix_fmt_name(out_format));
568  return AVERROR(ENOSYS);
569  }
570 
571  in_deinterleaved_format = get_deinterleaved_format(in_format);
572  out_deinterleaved_format = get_deinterleaved_format(out_format);
573  if (in_deinterleaved_format == AV_PIX_FMT_NONE ||
574  out_deinterleaved_format == AV_PIX_FMT_NONE)
575  return AVERROR_BUG;
576 
577  /* figure out which stages need to be done */
578  if (in_width != out_width || in_height != out_height ||
579  in_deinterleaved_format != out_deinterleaved_format) {
580  s->stages[STAGE_RESIZE].stage_needed = 1;
581 
582  if (s->interp_algo == NPPI_INTER_SUPER &&
583  (out_width > in_width && out_height > in_height)) {
584  s->interp_algo = NPPI_INTER_LANCZOS;
585  av_log(ctx, AV_LOG_WARNING, "super-sampling not supported for output dimensions, using lanczos instead.\n");
586  }
587  if (s->interp_algo == NPPI_INTER_SUPER &&
588  !(out_width < in_width && out_height < in_height)) {
589  s->interp_algo = NPPI_INTER_CUBIC;
590  av_log(ctx, AV_LOG_WARNING, "super-sampling not supported for output dimensions, using cubic instead.\n");
591  }
592  }
593 
594  if (!s->stages[STAGE_RESIZE].stage_needed && in_format == out_format)
595  s->passthrough = 1;
596 
597  if (!s->passthrough) {
598  if (in_format != in_deinterleaved_format)
599  s->stages[STAGE_DEINTERLEAVE].stage_needed = 1;
600  if (out_format != out_deinterleaved_format)
601  s->stages[STAGE_INTERLEAVE].stage_needed = 1;
602  }
603 
604  s->stages[STAGE_DEINTERLEAVE].in_fmt = in_format;
605  s->stages[STAGE_DEINTERLEAVE].out_fmt = in_deinterleaved_format;
606  s->stages[STAGE_DEINTERLEAVE].planes_in[0].width = in_width;
607  s->stages[STAGE_DEINTERLEAVE].planes_in[0].height = in_height;
608 
609  s->stages[STAGE_RESIZE].in_fmt = in_deinterleaved_format;
610  s->stages[STAGE_RESIZE].out_fmt = out_deinterleaved_format;
611  s->stages[STAGE_RESIZE].planes_in[0].width = in_width;
612  s->stages[STAGE_RESIZE].planes_in[0].height = in_height;
613  s->stages[STAGE_RESIZE].planes_out[0].width = out_width;
614  s->stages[STAGE_RESIZE].planes_out[0].height = out_height;
615 
616  s->stages[STAGE_INTERLEAVE].in_fmt = out_deinterleaved_format;
617  s->stages[STAGE_INTERLEAVE].out_fmt = out_format;
618  s->stages[STAGE_INTERLEAVE].planes_in[0].width = out_width;
619  s->stages[STAGE_INTERLEAVE].planes_in[0].height = out_height;
620 
621  /* init the hardware contexts */
622  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
623  if (!s->stages[i].stage_needed)
624  continue;
625 
626  ret = init_stage(&s->stages[i], in_frames_ctx->device_ref);
627  if (ret < 0)
628  return ret;
629 
630  last_stage = i;
631  }
632 
633  if (last_stage >= 0)
634  outl->hw_frames_ctx = av_buffer_ref(s->stages[last_stage].frames_ctx);
635  else
636  outl->hw_frames_ctx = av_buffer_ref(inl->hw_frames_ctx);
637 
638  if (!outl->hw_frames_ctx)
639  return AVERROR(ENOMEM);
640 
641  return 0;
642 }
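As an editorial illustration of the stage selection above (not part of the source), a common NV12 downscale exercises all three stages because NV12 is resized through its deinterleaved YUV420P form:

/*
 * Example chain for NV12 1920x1080 -> NV12 1280x720 (illustrative):
 *   STAGE_DEINTERLEAVE: NV12    -> YUV420P  at 1920x1080
 *   STAGE_RESIZE:       YUV420P -> YUV420P, 1920x1080 -> 1280x720
 *   STAGE_INTERLEAVE:   YUV420P -> NV12     at 1280x720
 * A same-size, same-format invocation takes the s->passthrough path instead.
 */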
643 
644 static int config_props(AVFilterLink *outlink)
645 {
646  AVFilterContext *ctx = outlink->src;
647  AVFilterLink *inlink0 = outlink->src->inputs[0];
648  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref_npp ?
649  outlink->src->inputs[1] :
650  outlink->src->inputs[0];
651  NPPScaleContext *s = ctx->priv;
652  int ret;
653 
654  if ((ret = nppscale_eval_dimensions(ctx)) < 0)
655  goto fail;
656 
657  ff_scale_adjust_dimensions(inlink, &s->w, &s->h,
658  s->force_original_aspect_ratio,
659  s->force_divisible_by);
660 
661  if (s->w > INT_MAX || s->h > INT_MAX ||
662  (s->h * inlink->w) > INT_MAX ||
663  (s->w * inlink->h) > INT_MAX)
664  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
665 
666  outlink->w = s->w;
667  outlink->h = s->h;
668 
669  ret = init_processing_chain(ctx, inlink0->w, inlink0->h, outlink->w, outlink->h);
670  if (ret < 0)
671  return ret;
672 
673  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d -> w:%d h:%d\n",
674  inlink->w, inlink->h, outlink->w, outlink->h);
675 
676  if (inlink->sample_aspect_ratio.num)
677  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h*inlink->w,
678  outlink->w*inlink->h},
679  inlink->sample_aspect_ratio);
680  else
681  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
682 
683  return 0;
684 
685 fail:
686  return ret;
687 }
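A numeric example of the sample-aspect-ratio adjustment performed above (editorial, not in the source): squeezing a square-pixel 1920x1080 input down to 1280x1080 gives

/*
 * outlink->sample_aspect_ratio
 *   = (outlink->h * inlink->w) / (outlink->w * inlink->h) * inlink SAR
 *   = (1080 * 1920) / (1280 * 1080) * 1/1
 *   = 3/2
 * so the scaled picture still displays with a 16:9 frame.
 */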
688 
689 static int config_props_ref(AVFilterLink *outlink)
690 {
691  FilterLink *outl = ff_filter_link(outlink);
692  AVFilterLink *inlink = outlink->src->inputs[1];
693  FilterLink *inl = ff_filter_link(inlink);
694  FilterLink *ol = ff_filter_link(outlink);
695 
696  outlink->w = inlink->w;
697  outlink->h = inlink->h;
698  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
699  outlink->time_base = inlink->time_base;
700  ol->frame_rate = inl->frame_rate;
701 
702  outl->hw_frames_ctx = av_buffer_ref(inl->hw_frames_ctx);
703 
704  return 0;
705 }
706 
707 static int nppscale_deinterleave(AVFilterContext *ctx, NPPScaleStageContext *stage,
708  AVFrame *out, AVFrame *in)
709 {
710  AVHWFramesContext *in_frames_ctx = (AVHWFramesContext*)in->hw_frames_ctx->data;
711  NppStatus err;
712 
713  switch (in_frames_ctx->sw_format) {
714  case AV_PIX_FMT_NV12:
715  err = nppiYCbCr420_8u_P2P3R(in->data[0], in->linesize[0],
716  in->data[1], in->linesize[1],
717  out->data, out->linesize,
718  (NppiSize){ in->width, in->height });
719  break;
720  default:
721  return AVERROR_BUG;
722  }
723  if (err != NPP_SUCCESS) {
724  av_log(ctx, AV_LOG_ERROR, "NPP deinterleave error: %d\n", err);
725  return AVERROR_UNKNOWN;
726  }
727 
728  return 0;
729 }
730 
731 static int nppscale_resize(AVFilterContext *ctx, NPPScaleStageContext *stage,
732  AVFrame *out, AVFrame *in)
733 {
734  NPPScaleContext *s = ctx->priv;
735  NppStatus err;
736  int i;
737 
738  for (i = 0; i < FF_ARRAY_ELEMS(stage->planes_in) && i < FF_ARRAY_ELEMS(in->data) && in->data[i]; i++) {
739  int iw = stage->planes_in[i].width;
740  int ih = stage->planes_in[i].height;
741  int ow = stage->planes_out[i].width;
742  int oh = stage->planes_out[i].height;
743 
744  err = nppiResizeSqrPixel_8u_C1R(in->data[i], (NppiSize){ iw, ih },
745  in->linesize[i], (NppiRect){ 0, 0, iw, ih },
746  out->data[i], out->linesize[i],
747  (NppiRect){ 0, 0, ow, oh },
748  (double)ow / iw, (double)oh / ih,
749  0.0, 0.0, s->interp_algo);
750  if (err != NPP_SUCCESS) {
751  av_log(ctx, AV_LOG_ERROR, "NPP resize error: %d\n", err);
752  return AVERROR_UNKNOWN;
753  }
754  }
755 
756  return 0;
757 }
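To make the per-plane loop above concrete (editorial example, not part of the source), a YUV420P 1920x1080 to 1280x720 resize issues one nppiResizeSqrPixel_8u_C1R call per plane:

/*
 * plane 0: 1920x1080 -> 1280x720, nXFactor = 1280.0/1920 = 2/3, nYFactor = 720.0/1080 = 2/3
 * plane 1:  960x540  ->  640x360, same factors
 * plane 2:  960x540  ->  640x360, same factors
 * (the factors are exactly the (double)ow / iw and (double)oh / ih passed above)
 */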
758 
759 static int nppscale_interleave(AVFilterContext *ctx, NPPScaleStageContext *stage,
760  AVFrame *out, AVFrame *in)
761 {
762  AVHWFramesContext *out_frames_ctx = (AVHWFramesContext*)out->hw_frames_ctx->data;
763  NppStatus err;
764 
765  switch (out_frames_ctx->sw_format) {
766  case AV_PIX_FMT_NV12:
767  err = nppiYCbCr420_8u_P3P2R((const uint8_t**)in->data,
768  in->linesize,
769  out->data[0], out->linesize[0],
770  out->data[1], out->linesize[1],
771  (NppiSize){ in->width, in->height });
772  break;
773  default:
774  return AVERROR_BUG;
775  }
776  if (err != NPP_SUCCESS) {
777  av_log(ctx, AV_LOG_ERROR, "NPP deinterleave error: %d\n", err);
778  return AVERROR_UNKNOWN;
779  }
780 
781  return 0;
782 }
783 
784 static int (*const nppscale_process[])(AVFilterContext *ctx, NPPScaleStageContext *stage,
785  AVFrame *out, AVFrame *in) = {
786  [STAGE_DEINTERLEAVE] = nppscale_deinterleave,
787  [STAGE_RESIZE] = nppscale_resize,
788  [STAGE_INTERLEAVE] = nppscale_interleave,
789 };
790 
791 static int nppscale_scale(AVFilterLink *link, AVFrame *out, AVFrame *in)
792 {
793  FilterLink *inl = ff_filter_link(link);
794  AVFilterContext *ctx = link->dst;
795  NPPScaleContext *s = ctx->priv;
796  AVFilterLink *outlink = ctx->outputs[0];
797  AVFrame *src = in;
798  char buf[32];
799  int i, ret, last_stage = -1;
800  int frame_changed;
801 
802  frame_changed = in->width != link->w ||
803  in->height != link->h ||
804  in->format != link->format ||
805  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
806  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
807 
808  if (s->eval_mode == EVAL_MODE_FRAME || frame_changed) {
809  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
810 
811  av_expr_count_vars(s->w_pexpr, vars_w, VARS_NB);
812  av_expr_count_vars(s->h_pexpr, vars_h, VARS_NB);
813 
814  if (s->eval_mode == EVAL_MODE_FRAME && !frame_changed && ctx->filter != &ff_vf_scale2ref_npp &&
815  !(vars_w[VAR_N] || vars_w[VAR_T]
816 #if FF_API_FRAME_PKT
817  || vars_w[VAR_POS]
818 #endif
819  ) &&
820  !(vars_h[VAR_N] || vars_h[VAR_T]
821 #if FF_API_FRAME_PKT
822  || vars_h[VAR_POS]
823 #endif
824  ) && s->w && s->h)
825  goto scale;
826 
827  if (s->eval_mode == EVAL_MODE_INIT) {
828  snprintf(buf, sizeof(buf)-1, "%d", outlink->w);
829  av_opt_set(s, "w", buf, 0);
830  snprintf(buf, sizeof(buf)-1, "%d", outlink->h);
831  av_opt_set(s, "h", buf, 0);
832 
833  ret = nppscale_parse_expr(ctx, NULL, &s->w_pexpr, "width", s->w_expr);
834  if (ret < 0)
835  return ret;
836 
837  ret = nppscale_parse_expr(ctx, NULL, &s->h_pexpr, "height", s->h_expr);
838  if (ret < 0)
839  return ret;
840  }
841 
842  if (ctx->filter == &ff_vf_scale2ref_npp) {
843  s->var_values[VAR_S2R_MAIN_N] = inl->frame_count_out;
844  s->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
845 #if FF_API_FRAME_PKT
846 FF_DISABLE_DEPRECATION_WARNINGS
847  s->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
848 FF_ENABLE_DEPRECATION_WARNINGS
849 #endif
850  } else {
851  s->var_values[VAR_N] = inl->frame_count_out;
852  s->var_values[VAR_T] = TS2T(in->pts, link->time_base);
853 #if FF_API_FRAME_PKT
854 FF_DISABLE_DEPRECATION_WARNINGS
855  s->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
856 FF_ENABLE_DEPRECATION_WARNINGS
857 #endif
858  }
859 
860  link->format = in->format;
861  link->w = in->width;
862  link->h = in->height;
863 
863 
864  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
865  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
866 
867  if ((ret = config_props(outlink)) < 0)
868  return ret;
869  }
870 
871 scale:
872  for (i = 0; i < FF_ARRAY_ELEMS(s->stages); i++) {
873  if (!s->stages[i].stage_needed)
874  continue;
875 
876  ret = nppscale_process[i](ctx, &s->stages[i], s->stages[i].frame, src);
877  if (ret < 0)
878  return ret;
879 
880  src = s->stages[i].frame;
881  last_stage = i;
882  }
883  if (last_stage < 0)
884  return AVERROR_BUG;
885 
886  ret = av_hwframe_get_buffer(src->hw_frames_ctx, s->tmp_frame, 0);
887  if (ret < 0)
888  return ret;
889 
890  s->tmp_frame->width = src->width;
891  s->tmp_frame->height = src->height;
892 
893  av_frame_move_ref(out, src);
894  av_frame_move_ref(src, s->tmp_frame);
895 
896  ret = av_frame_copy_props(out, in);
897  if (ret < 0)
898  return ret;
899 
900  return 0;
901 }
902 
903 static int nppscale_filter_frame(AVFilterLink *link, AVFrame *in)
904 {
905  AVFilterContext *ctx = link->dst;
906  NPPScaleContext *s = ctx->priv;
907  AVFilterLink *outlink = ctx->outputs[0];
908  FilterLink *l = ff_filter_link(outlink);
909  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)l->hw_frames_ctx->data;
910  AVCUDADeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
911 
912  AVFrame *out = NULL;
913  CUcontext dummy;
914  int ret = 0;
915 
916  if (s->passthrough)
917  return ff_filter_frame(outlink, in);
918 
919  out = av_frame_alloc();
920  if (!out) {
921  ret = AVERROR(ENOMEM);
922  goto fail;
923  }
924 
925  ret = CHECK_CU(device_hwctx->internal->cuda_dl->cuCtxPushCurrent(device_hwctx->cuda_ctx));
926  if (ret < 0)
927  goto fail;
928 
929  ret = nppscale_scale(link, out, in);
930 
931  CHECK_CU(device_hwctx->internal->cuda_dl->cuCtxPopCurrent(&dummy));
932  if (ret < 0)
933  goto fail;
934 
935  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
936  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
937  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
938  INT_MAX);
939 
940  av_frame_free(&in);
941  return ff_filter_frame(outlink, out);
942 fail:
943  av_frame_free(&in);
944  av_frame_free(&out);
945  return ret;
946 }
947 
948 static int nppscale_filter_frame_ref(AVFilterLink *link, AVFrame *in)
949 {
950  FilterLink *inl = ff_filter_link(link);
951  NPPScaleContext *scale = link->dst->priv;
952  AVFilterLink *outlink = link->dst->outputs[1];
953  int frame_changed;
954 
955  frame_changed = in->width != link->w ||
956  in->height != link->h ||
957  in->format != link->format ||
958  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
959  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num;
960 
961  if (frame_changed) {
962  link->format = in->format;
963  link->w = in->width;
964  link->h = in->height;
965  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
966  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
967 
968  config_props_ref(outlink);
969  }
970 
971  if (scale->eval_mode == EVAL_MODE_FRAME) {
972  scale->var_values[VAR_N] = inl->frame_count_out;
973  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
974 #if FF_API_FRAME_PKT
975 FF_DISABLE_DEPRECATION_WARNINGS
976  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
977 FF_ENABLE_DEPRECATION_WARNINGS
978 #endif
979  }
980 
981  return ff_filter_frame(outlink, in);
982 }
983 
984 static int request_frame(AVFilterLink *outlink)
985 {
986  return ff_request_frame(outlink->src->inputs[0]);
987 }
988 
989 static int request_frame_ref(AVFilterLink *outlink)
990 {
991  return ff_request_frame(outlink->src->inputs[1]);
992 }
993 
994 #define OFFSET(x) offsetof(NPPScaleContext, x)
995 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
996 static const AVOption options[] = {
997  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = FLAGS },
998  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = FLAGS },
999  { "format", "Output pixel format", OFFSET(format_str), AV_OPT_TYPE_STRING, { .str = "same" }, .flags = FLAGS },
1000  { "s", "Output video size", OFFSET(size_str), AV_OPT_TYPE_STRING, { .str = NULL }, .flags = FLAGS },
1001 
1002  { "interp_algo", "Interpolation algorithm used for resizing", OFFSET(interp_algo), AV_OPT_TYPE_INT, { .i64 = NPPI_INTER_CUBIC }, 0, INT_MAX, FLAGS, .unit = "interp_algo" },
1003  { "nn", "nearest neighbour", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_NN }, 0, 0, FLAGS, .unit = "interp_algo" },
1004  { "linear", "linear", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_LINEAR }, 0, 0, FLAGS, .unit = "interp_algo" },
1005  { "cubic", "cubic", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC }, 0, 0, FLAGS, .unit = "interp_algo" },
1006  { "cubic2p_bspline", "2-parameter cubic (B=1, C=0)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_BSPLINE }, 0, 0, FLAGS, .unit = "interp_algo" },
1007  { "cubic2p_catmullrom", "2-parameter cubic (B=0, C=1/2)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_CATMULLROM }, 0, 0, FLAGS, .unit = "interp_algo" },
1008  { "cubic2p_b05c03", "2-parameter cubic (B=1/2, C=3/10)", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_CUBIC2P_B05C03 }, 0, 0, FLAGS, .unit = "interp_algo" },
1009  { "super", "supersampling", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_SUPER }, 0, 0, FLAGS, .unit = "interp_algo" },
1010  { "lanczos", "Lanczos", 0, AV_OPT_TYPE_CONST, { .i64 = NPPI_INTER_LANCZOS }, 0, 0, FLAGS, .unit = "interp_algo" },
1011  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 2, FLAGS, .unit = "force_oar" },
1012  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1013  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1014  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1015  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 256, FLAGS },
1016  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, { .i64 = EVAL_MODE_INIT }, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1017  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, { .i64 = EVAL_MODE_INIT }, 0, 0, FLAGS, .unit = "eval" },
1018  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, { .i64 = EVAL_MODE_FRAME }, 0, 0, FLAGS, .unit = "eval" },
1019  { NULL },
1020 };
1021 
1022 static const AVClass nppscale_class = {
1023  .class_name = "nppscale",
1024  .item_name = av_default_item_name,
1025  .option = options,
1026  .version = LIBAVUTIL_VERSION_INT,
1027  .category = AV_CLASS_CATEGORY_FILTER,
1028 };
1029 
1030 static const AVFilterPad nppscale_inputs[] = {
1031  {
1032  .name = "default",
1033  .type = AVMEDIA_TYPE_VIDEO,
1034  .filter_frame = nppscale_filter_frame,
1035  }
1036 };
1037 
1038 static const AVFilterPad nppscale_outputs[] = {
1039  {
1040  .name = "default",
1041  .type = AVMEDIA_TYPE_VIDEO,
1042  .config_props = config_props,
1043  }
1044 };
1045 
1046 const AVFilter ff_vf_scale_npp = {
1047  .name = "scale_npp",
1048  .description = NULL_IF_CONFIG_SMALL("NVIDIA Performance Primitives video "
1049  "scaling and format conversion"),
1050 
1051  .init = nppscale_init,
1052  .uninit = nppscale_uninit,
1053 
1054  .priv_size = sizeof(NPPScaleContext),
1055  .priv_class = &nppscale_class,
1056 
1057  FILTER_INPUTS(nppscale_inputs),
1058  FILTER_OUTPUTS(nppscale_outputs),
1059 
1060  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_CUDA),
1061 
1062  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1063 };
1064 
1065 static const AVFilterPad nppscale2ref_inputs[] = {
1066  {
1067  .name = "default",
1068  .type = AVMEDIA_TYPE_VIDEO,
1069  .filter_frame = nppscale_filter_frame,
1070  },
1071  {
1072  .name = "ref",
1073  .type = AVMEDIA_TYPE_VIDEO,
1074  .filter_frame = nppscale_filter_frame_ref,
1075  }
1076 };
1077 
1078 static const AVFilterPad nppscale2ref_outputs[] = {
1079  {
1080  .name = "default",
1081  .type = AVMEDIA_TYPE_VIDEO,
1082  .config_props = config_props,
1083  .request_frame= request_frame,
1084  },
1085  {
1086  .name = "ref",
1087  .type = AVMEDIA_TYPE_VIDEO,
1088  .config_props = config_props_ref,
1089  .request_frame= request_frame_ref,
1090  }
1091 };
1092 
1093 const AVFilter ff_vf_scale2ref_npp = {
1094  .name = "scale2ref_npp",
1095  .description = NULL_IF_CONFIG_SMALL("NVIDIA Performance Primitives video "
1096  "scaling and format conversion to the "
1097  "given reference."),
1098 
1099  .init = nppscale_init,
1100  .uninit = nppscale_uninit,
1101 
1102  .priv_size = sizeof(NPPScaleContext),
1103  .priv_class = &nppscale_class,
1104 
1105  FILTER_INPUTS(nppscale2ref_inputs),
1106  FILTER_OUTPUTS(nppscale2ref_outputs),
1107 
1108  FILTER_SINGLE_PIXFMT(AV_PIX_FMT_CUDA),
1109 
1110  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1111 };
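Finally, a minimal, self-contained sketch of how an application might check at run time that this filter is present; this is an editorial addition, not part of the original file, and assumes an FFmpeg build with libnpp support enabled (e.g. configured with --enable-libnpp, typically together with --enable-nonfree).

/* probe_scale_npp.c: link against libavfilter, e.g.
 *   cc probe_scale_npp.c $(pkg-config --cflags --libs libavfilter) */
#include <stdio.h>
#include <libavfilter/avfilter.h>

int main(void)
{
    /* Look the filter up by the name registered in ff_vf_scale_npp above. */
    const AVFilter *f = avfilter_get_by_name("scale_npp");

    if (!f) {
        fprintf(stderr, "scale_npp is not compiled into this libavfilter\n");
        return 1;
    }
    printf("%s: %s\n", f->name, f->description);
    return 0;
}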