FFmpeg
vf_scale.c
1 /*
2  * Copyright (c) 2007 Bobby Bingham
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * scale video filter
24  */
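/*
 * Typical usage (illustrative example, not taken from this file):
 *     ffmpeg -i in.mp4 -vf "scale=w=1280:h=-2" out.mp4
 * scales the width to 1280 and derives a height that keeps the input
 * aspect ratio while remaining divisible by 2; see the "w"/"h" semantics
 * documented on ScaleContext below.
 */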
25 
26 #include <float.h>
27 #include <stdio.h>
28 #include <string.h>
29 
30 #include "avfilter.h"
31 #include "formats.h"
32 #include "internal.h"
33 #include "scale_eval.h"
34 #include "video.h"
35 #include "libavutil/eval.h"
36 #include "libavutil/imgutils_internal.h"
37 #include "libavutil/internal.h"
38 #include "libavutil/mem.h"
39 #include "libavutil/opt.h"
40 #include "libavutil/parseutils.h"
41 #include "libavutil/pixdesc.h"
42 #include "libswscale/swscale.h"
43 
44 static const char *const var_names[] = {
45  "in_w", "iw",
46  "in_h", "ih",
47  "out_w", "ow",
48  "out_h", "oh",
49  "a",
50  "sar",
51  "dar",
52  "hsub",
53  "vsub",
54  "ohsub",
55  "ovsub",
56  "n",
57  "t",
58 #if FF_API_FRAME_PKT
59  "pos",
60 #endif
61  "main_w",
62  "main_h",
63  "main_a",
64  "main_sar",
65  "main_dar", "mdar",
66  "main_hsub",
67  "main_vsub",
68  "main_n",
69  "main_t",
70  "main_pos",
71  NULL
72 };
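/* Note: the strings above are matched positionally against the var_name
 * enum below; they are the variables that may appear in the "w" and "h"
 * option expressions, e.g. scale=w=iw/2:h=ih/2. */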
73 
74 enum var_name {
75  VAR_IN_W, VAR_IW,
76  VAR_IN_H, VAR_IH,
77  VAR_OUT_W, VAR_OW,
78  VAR_OUT_H, VAR_OH,
79  VAR_A,
80  VAR_SAR,
81  VAR_DAR,
82  VAR_HSUB,
83  VAR_VSUB,
84  VAR_OHSUB,
85  VAR_OVSUB,
86  VAR_N,
87  VAR_T,
88 #if FF_API_FRAME_PKT
89  VAR_POS,
90 #endif
91  VAR_S2R_MAIN_W,
92  VAR_S2R_MAIN_H,
93  VAR_S2R_MAIN_A,
94  VAR_S2R_MAIN_SAR,
95  VAR_S2R_MAIN_DAR, VAR_S2R_MDAR,
96  VAR_S2R_MAIN_HSUB,
97  VAR_S2R_MAIN_VSUB,
98  VAR_S2R_MAIN_N,
99  VAR_S2R_MAIN_T,
100  VAR_S2R_MAIN_POS,
101  VARS_NB
102 };
103 
104 enum EvalMode {
105  EVAL_MODE_INIT,
106  EVAL_MODE_FRAME,
107  EVAL_MODE_NB
108 };
109 
110 typedef struct ScaleContext {
111  const AVClass *class;
112  struct SwsContext *sws; ///< software scaler context
113  struct SwsContext *isws[2]; ///< software scaler context for interlaced material
114  // context used for forwarding options to sws
115  struct SwsContext *sws_opts;
116 
117  /**
118  * New dimensions. Special values are:
119  * 0 = original width/height
120  * -1 = keep original aspect
121  * -N = try to keep aspect but make sure it is divisible by N
122  */
123  int w, h;
124  char *size_str;
125  double param[2]; // sws params
126 
127  int hsub, vsub; ///< chroma subsampling
128  int slice_y; ///< top of current output slice
129  int input_is_pal; ///< set to 1 if the input format is paletted
130  int output_is_pal; ///< set to 1 if the output format is paletted
131  int interlaced;
132 
133  char *w_expr; ///< width expression string
134  char *h_expr; ///< height expression string
135  AVExpr *w_pexpr;
136  AVExpr *h_pexpr;
137  double var_values[VARS_NB];
138 
139  char *flags_str;
140 
141  int in_color_matrix;
142  int out_color_matrix;
143 
144  int in_range;
145  int out_range;
146 
147  int out_h_chr_pos;
148  int out_v_chr_pos;
149  int in_h_chr_pos;
150  int in_v_chr_pos;
151 
152  int force_original_aspect_ratio;
153  int force_divisible_by;
154 
155  int eval_mode; ///< expression evaluation mode
156 
157 } ScaleContext;
158 
159 const AVFilter ff_vf_scale2ref;
160 
161 static int config_props(AVFilterLink *outlink);
162 
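/* Validate the parsed width/height expressions: reject self-references
 * (ow/out_w inside the width expression, oh/out_h inside the height
 * expression), warn about mutually circular references, and reject
 * scale2ref-only variables in the plain scale filter as well as the
 * per-frame variables n, t and pos when eval_mode is "init". */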
163 static int check_exprs(AVFilterContext *ctx)
164 {
165  ScaleContext *scale = ctx->priv;
166  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
167 
168  if (!scale->w_pexpr && !scale->h_pexpr)
169  return AVERROR(EINVAL);
170 
171  if (scale->w_pexpr)
172  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
173  if (scale->h_pexpr)
174  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
175 
176  if (vars_w[VAR_OUT_W] || vars_w[VAR_OW]) {
177  av_log(ctx, AV_LOG_ERROR, "Width expression cannot be self-referencing: '%s'.\n", scale->w_expr);
178  return AVERROR(EINVAL);
179  }
180 
181  if (vars_h[VAR_OUT_H] || vars_h[VAR_OH]) {
182  av_log(ctx, AV_LOG_ERROR, "Height expression cannot be self-referencing: '%s'.\n", scale->h_expr);
183  return AVERROR(EINVAL);
184  }
185 
186  if ((vars_w[VAR_OUT_H] || vars_w[VAR_OH]) &&
187  (vars_h[VAR_OUT_W] || vars_h[VAR_OW])) {
188  av_log(ctx, AV_LOG_WARNING, "Circular references detected for width '%s' and height '%s' - possibly invalid.\n", scale->w_expr, scale->h_expr);
189  }
190 
191  if (ctx->filter != &ff_vf_scale2ref &&
192  (vars_w[VAR_S2R_MAIN_W] || vars_h[VAR_S2R_MAIN_W] ||
193  vars_w[VAR_S2R_MAIN_H] || vars_h[VAR_S2R_MAIN_H] ||
194  vars_w[VAR_S2R_MAIN_A] || vars_h[VAR_S2R_MAIN_A] ||
195  vars_w[VAR_S2R_MAIN_SAR] || vars_h[VAR_S2R_MAIN_SAR] ||
196  vars_w[VAR_S2R_MAIN_DAR] || vars_h[VAR_S2R_MAIN_DAR] ||
197  vars_w[VAR_S2R_MDAR] || vars_h[VAR_S2R_MDAR] ||
198  vars_w[VAR_S2R_MAIN_HSUB] || vars_h[VAR_S2R_MAIN_HSUB] ||
199  vars_w[VAR_S2R_MAIN_VSUB] || vars_h[VAR_S2R_MAIN_VSUB] ||
200  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
201  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
202  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
203  av_log(ctx, AV_LOG_ERROR, "Expressions with scale2ref variables are not valid in scale filter.\n");
204  return AVERROR(EINVAL);
205  }
206 
207  if (scale->eval_mode == EVAL_MODE_INIT &&
208  (vars_w[VAR_N] || vars_h[VAR_N] ||
209  vars_w[VAR_T] || vars_h[VAR_T] ||
210 #if FF_API_FRAME_PKT
211  vars_w[VAR_POS] || vars_h[VAR_POS] ||
212 #endif
213  vars_w[VAR_S2R_MAIN_N] || vars_h[VAR_S2R_MAIN_N] ||
214  vars_w[VAR_S2R_MAIN_T] || vars_h[VAR_S2R_MAIN_T] ||
215  vars_w[VAR_S2R_MAIN_POS] || vars_h[VAR_S2R_MAIN_POS]) ) {
216  av_log(ctx, AV_LOG_ERROR, "Expressions with frame variables 'n', 't', 'pos' are not valid in init eval_mode.\n");
217  return AVERROR(EINVAL);
218  }
219 
220  return 0;
221 }
222 
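/* (Re)parse one of the size expressions. If parsing, validation or the
 * subsequent reconfiguration fails, the previous expression string and
 * parsed AVExpr are restored, so a bad runtime command leaves the filter
 * in its previous working state. */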
223 static int scale_parse_expr(AVFilterContext *ctx, char *str_expr, AVExpr **pexpr_ptr, const char *var, const char *args)
224 {
225  ScaleContext *scale = ctx->priv;
226  int ret, is_inited = 0;
227  char *old_str_expr = NULL;
228  AVExpr *old_pexpr = NULL;
229 
230  if (str_expr) {
231  old_str_expr = av_strdup(str_expr);
232  if (!old_str_expr)
233  return AVERROR(ENOMEM);
234  av_opt_set(scale, var, args, 0);
235  }
236 
237  if (*pexpr_ptr) {
238  old_pexpr = *pexpr_ptr;
239  *pexpr_ptr = NULL;
240  is_inited = 1;
241  }
242 
243  ret = av_expr_parse(pexpr_ptr, args, var_names,
244  NULL, NULL, NULL, NULL, 0, ctx);
245  if (ret < 0) {
246  av_log(ctx, AV_LOG_ERROR, "Cannot parse expression for %s: '%s'\n", var, args);
247  goto revert;
248  }
249 
250  ret = check_exprs(ctx);
251  if (ret < 0)
252  goto revert;
253 
254  if (is_inited && (ret = config_props(ctx->outputs[0])) < 0)
255  goto revert;
256 
257  av_expr_free(old_pexpr);
258  old_pexpr = NULL;
259  av_freep(&old_str_expr);
260 
261  return 0;
262 
263 revert:
264  av_expr_free(*pexpr_ptr);
265  *pexpr_ptr = NULL;
266  if (old_str_expr) {
267  av_opt_set(scale, var, old_str_expr, 0);
268  av_free(old_str_expr);
269  }
270  if (old_pexpr)
271  *pexpr_ptr = old_pexpr;
272 
273  return ret;
274 }
275 
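/* Allocate the SwsContext that is used only as an AVOptions container for
 * user-supplied scaler options; "threads" is preset to 0 so init() can
 * tell whether the user overrode it. */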
276 static av_cold int preinit(AVFilterContext *ctx)
277 {
278  ScaleContext *scale = ctx->priv;
279  int ret;
280 
281  scale->sws_opts = sws_alloc_context();
282  if (!scale->sws_opts)
283  return AVERROR(ENOMEM);
284 
285  // set threads=0, so we can later check whether the user modified it
286  ret = av_opt_set_int(scale->sws_opts, "threads", 0, 0);
287  if (ret < 0)
288  return ret;
289 
290  return 0;
291 }
292 
293 static const int sws_colorspaces[] = {
294  AVCOL_SPC_UNSPECIFIED,
295  AVCOL_SPC_RGB,
296  AVCOL_SPC_BT709,
297  AVCOL_SPC_BT470BG,
298  AVCOL_SPC_SMPTE170M,
299  AVCOL_SPC_SMPTE240M,
300  AVCOL_SPC_FCC,
301  AVCOL_SPC_BT2020_NCL,
302  -1
303 };
304 
305 static av_cold int init(AVFilterContext *ctx)
306 {
307  ScaleContext *scale = ctx->priv;
308  int64_t threads;
309  int ret;
310 
311  if (scale->size_str && (scale->w_expr || scale->h_expr)) {
312  av_log(ctx, AV_LOG_ERROR,
313  "Size and width/height expressions cannot be set at the same time.\n");
314  return AVERROR(EINVAL);
315  }
316 
317  if (scale->w_expr && !scale->h_expr)
318  FFSWAP(char *, scale->w_expr, scale->size_str);
319 
320  if (scale->size_str) {
321  char buf[32];
322  if ((ret = av_parse_video_size(&scale->w, &scale->h, scale->size_str)) < 0) {
323  av_log(ctx, AV_LOG_ERROR,
324  "Invalid size '%s'\n", scale->size_str);
325  return ret;
326  }
327  snprintf(buf, sizeof(buf)-1, "%d", scale->w);
328  av_opt_set(scale, "w", buf, 0);
329  snprintf(buf, sizeof(buf)-1, "%d", scale->h);
330  av_opt_set(scale, "h", buf, 0);
331  }
332  if (!scale->w_expr)
333  av_opt_set(scale, "w", "iw", 0);
334  if (!scale->h_expr)
335  av_opt_set(scale, "h", "ih", 0);
336 
337  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
338  if (ret < 0)
339  return ret;
340 
341  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
342  if (ret < 0)
343  return ret;
344 
345  if (scale->in_color_matrix != -1 &&
346  !ff_fmt_is_in(scale->in_color_matrix, sws_colorspaces)) {
347  av_log(ctx, AV_LOG_ERROR, "Unsupported input color matrix '%s'\n",
348  av_color_space_name(scale->in_color_matrix));
349  return AVERROR(EINVAL);
350  }
351 
352  if (!ff_fmt_is_in(scale->out_color_matrix, sws_colorspaces)) {
353  av_log(ctx, AV_LOG_ERROR, "Unsupported output color matrix '%s'\n",
354  av_color_space_name(scale->out_color_matrix));
355  return AVERROR(EINVAL);
356  }
357 
358  av_log(ctx, AV_LOG_VERBOSE, "w:%s h:%s flags:'%s' interl:%d\n",
359  scale->w_expr, scale->h_expr, (char *)av_x_if_null(scale->flags_str, ""), scale->interlaced);
360 
361  if (scale->flags_str && *scale->flags_str) {
362  ret = av_opt_set(scale->sws_opts, "sws_flags", scale->flags_str, 0);
363  if (ret < 0)
364  return ret;
365  }
366 
367  for (int i = 0; i < FF_ARRAY_ELEMS(scale->param); i++)
368  if (scale->param[i] != DBL_MAX) {
369  ret = av_opt_set_double(scale->sws_opts, i ? "param1" : "param0",
370  scale->param[i], 0);
371  if (ret < 0)
372  return ret;
373  }
374 
375  // use generic thread-count if the user did not set it explicitly
376  ret = av_opt_get_int(scale->sws_opts, "threads", 0, &threads);
377  if (ret < 0)
378  return ret;
379  if (!threads)
380  av_opt_set_int(scale->sws_opts, "threads", ff_filter_get_nb_threads(ctx), 0);
381 
382  return 0;
383 }
384 
385 static av_cold void uninit(AVFilterContext *ctx)
386 {
387  ScaleContext *scale = ctx->priv;
388  av_expr_free(scale->w_pexpr);
389  av_expr_free(scale->h_pexpr);
390  scale->w_pexpr = scale->h_pexpr = NULL;
391  sws_freeContext(scale->sws_opts);
392  sws_freeContext(scale->sws);
393  sws_freeContext(scale->isws[0]);
394  sws_freeContext(scale->isws[1]);
395  scale->sws = NULL;
396 }
397 
398 static int query_formats(AVFilterContext *ctx)
399 {
400  ScaleContext *scale = ctx->priv;
401  AVFilterFormats *formats;
402  const AVPixFmtDescriptor *desc;
403  enum AVPixelFormat pix_fmt;
404  int ret;
405 
406  desc = NULL;
407  formats = NULL;
408  while ((desc = av_pix_fmt_desc_next(desc))) {
409  pix_fmt = av_pix_fmt_desc_get_id(desc);
410  if ((sws_isSupportedInput(pix_fmt) ||
411  sws_isSupportedEndiannessConversion(pix_fmt))
412  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
413  return ret;
414  }
415  }
416  if ((ret = ff_formats_ref(formats, &ctx->inputs[0]->outcfg.formats)) < 0)
417  return ret;
418 
419  desc = NULL;
420  formats = NULL;
421  while ((desc = av_pix_fmt_desc_next(desc))) {
422  pix_fmt = av_pix_fmt_desc_get_id(desc);
423  if ((sws_isSupportedOutput(pix_fmt) || pix_fmt == AV_PIX_FMT_PAL8 ||
424  sws_isSupportedEndiannessConversion(pix_fmt))
425  && (ret = ff_add_format(&formats, pix_fmt)) < 0) {
426  return ret;
427  }
428  }
429  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.formats)) < 0)
430  return ret;
431 
432  /* accept all supported inputs, even if user overrides their properties */
433  if ((ret = ff_formats_ref(ff_make_format_list(sws_colorspaces),
434  &ctx->inputs[0]->outcfg.color_spaces)) < 0)
435  return ret;
436 
437  if ((ret = ff_formats_ref(ff_all_color_ranges(),
438  &ctx->inputs[0]->outcfg.color_ranges)) < 0)
439  return ret;
440 
441  /* propagate output properties if overridden */
442  formats = scale->out_color_matrix != AVCOL_SPC_UNSPECIFIED
443  ? ff_make_formats_list_singleton(scale->out_color_matrix)
444  : ff_make_format_list(sws_colorspaces);
445  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.color_spaces)) < 0)
446  return ret;
447 
448  formats = scale->out_range != AVCOL_RANGE_UNSPECIFIED
449  ? ff_make_formats_list_singleton(scale->out_range)
450  : ff_all_color_ranges();
451  if ((ret = ff_formats_ref(formats, &ctx->outputs[0]->incfg.color_ranges)) < 0)
452  return ret;
453 
454  return 0;
455 }
456 
457 static int scale_eval_dimensions(AVFilterContext *ctx)
458 {
459  ScaleContext *scale = ctx->priv;
460  const char scale2ref = ctx->filter == &ff_vf_scale2ref;
461  const AVFilterLink *inlink = scale2ref ? ctx->inputs[1] : ctx->inputs[0];
462  const AVFilterLink *outlink = ctx->outputs[0];
463  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
464  const AVPixFmtDescriptor *out_desc = av_pix_fmt_desc_get(outlink->format);
465  char *expr;
466  int eval_w, eval_h;
467  int ret;
468  double res;
469  const AVPixFmtDescriptor *main_desc;
470  const AVFilterLink *main_link;
471 
472  if (scale2ref) {
473  main_link = ctx->inputs[0];
474  main_desc = av_pix_fmt_desc_get(main_link->format);
475  }
476 
477  scale->var_values[VAR_IN_W] = scale->var_values[VAR_IW] = inlink->w;
478  scale->var_values[VAR_IN_H] = scale->var_values[VAR_IH] = inlink->h;
479  scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = NAN;
480  scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = NAN;
481  scale->var_values[VAR_A] = (double) inlink->w / inlink->h;
482  scale->var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
483  (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
484  scale->var_values[VAR_DAR] = scale->var_values[VAR_A] * scale->var_values[VAR_SAR];
485  scale->var_values[VAR_HSUB] = 1 << desc->log2_chroma_w;
486  scale->var_values[VAR_VSUB] = 1 << desc->log2_chroma_h;
487  scale->var_values[VAR_OHSUB] = 1 << out_desc->log2_chroma_w;
488  scale->var_values[VAR_OVSUB] = 1 << out_desc->log2_chroma_h;
489 
490  if (scale2ref) {
491  scale->var_values[VAR_S2R_MAIN_W] = main_link->w;
492  scale->var_values[VAR_S2R_MAIN_H] = main_link->h;
493  scale->var_values[VAR_S2R_MAIN_A] = (double) main_link->w / main_link->h;
494  scale->var_values[VAR_S2R_MAIN_SAR] = main_link->sample_aspect_ratio.num ?
495  (double) main_link->sample_aspect_ratio.num / main_link->sample_aspect_ratio.den : 1;
496  scale->var_values[VAR_S2R_MAIN_DAR] = scale->var_values[VAR_S2R_MDAR] =
497  scale->var_values[VAR_S2R_MAIN_A] * scale->var_values[VAR_S2R_MAIN_SAR];
498  scale->var_values[VAR_S2R_MAIN_HSUB] = 1 << main_desc->log2_chroma_w;
499  scale->var_values[VAR_S2R_MAIN_VSUB] = 1 << main_desc->log2_chroma_h;
500  }
501 
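 /* Evaluate the width first so a height expression that references
  * ow/out_w sees a value, then evaluate the height, and finally
  * re-evaluate the width in case it references the computed oh/out_h. */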
502  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
503  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
504 
505  res = av_expr_eval(scale->h_pexpr, scale->var_values, NULL);
506  if (isnan(res)) {
507  expr = scale->h_expr;
508  ret = AVERROR(EINVAL);
509  goto fail;
510  }
511  eval_h = scale->var_values[VAR_OUT_H] = scale->var_values[VAR_OH] = (int) res == 0 ? inlink->h : (int) res;
512 
513  res = av_expr_eval(scale->w_pexpr, scale->var_values, NULL);
514  if (isnan(res)) {
515  expr = scale->w_expr;
516  ret = AVERROR(EINVAL);
517  goto fail;
518  }
519  eval_w = scale->var_values[VAR_OUT_W] = scale->var_values[VAR_OW] = (int) res == 0 ? inlink->w : (int) res;
520 
521  scale->w = eval_w;
522  scale->h = eval_h;
523 
524  return 0;
525 
526 fail:
527  av_log(ctx, AV_LOG_ERROR,
528  "Error when evaluating the expression '%s'.\n", expr);
529  return ret;
530 }
531 
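/* (Re)configure the output link: evaluate the size expressions, apply
 * force_original_aspect_ratio/force_divisible_by, and rebuild the sws
 * contexts (one progressive context plus two per-field contexts when
 * interlaced scaling is requested), forwarding range, colorspace and
 * chroma-position settings to libswscale. */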
532 static int config_props(AVFilterLink *outlink)
533 {
534  AVFilterContext *ctx = outlink->src;
535  AVFilterLink *inlink0 = outlink->src->inputs[0];
536  AVFilterLink *inlink = ctx->filter == &ff_vf_scale2ref ?
537  outlink->src->inputs[1] :
538  outlink->src->inputs[0];
539  enum AVPixelFormat outfmt = outlink->format;
540  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
541  const AVPixFmtDescriptor *outdesc = av_pix_fmt_desc_get(outfmt);
542  ScaleContext *scale = ctx->priv;
543  uint8_t *flags_val = NULL;
544  int in_range, in_colorspace;
545  int ret;
546 
547  if ((ret = scale_eval_dimensions(ctx)) < 0)
548  goto fail;
549 
550  outlink->w = scale->w;
551  outlink->h = scale->h;
552 
553  ff_scale_adjust_dimensions(inlink, &outlink->w, &outlink->h,
554  scale->force_original_aspect_ratio,
555  scale->force_divisible_by);
556 
557  if (outlink->w > INT_MAX ||
558  outlink->h > INT_MAX ||
559  (outlink->h * inlink->w) > INT_MAX ||
560  (outlink->w * inlink->h) > INT_MAX)
561  av_log(ctx, AV_LOG_ERROR, "Rescaled value for width or height is too big.\n");
562 
563  /* TODO: make algorithm configurable */
564 
565  scale->input_is_pal = desc->flags & AV_PIX_FMT_FLAG_PAL;
566  if (outfmt == AV_PIX_FMT_PAL8) outfmt = AV_PIX_FMT_BGR8;
567  scale->output_is_pal = av_pix_fmt_desc_get(outfmt)->flags & AV_PIX_FMT_FLAG_PAL;
568 
569  in_range = scale->in_range;
570  if (in_range == AVCOL_RANGE_UNSPECIFIED)
571  in_range = inlink0->color_range;
572 
573  in_colorspace = scale->in_color_matrix;
574  if (in_colorspace == -1 /* auto */)
575  in_colorspace = inlink0->colorspace;
576 
577  if (scale->sws)
578  sws_freeContext(scale->sws);
579  if (scale->isws[0])
580  sws_freeContext(scale->isws[0]);
581  if (scale->isws[1])
582  sws_freeContext(scale->isws[1]);
583  scale->isws[0] = scale->isws[1] = scale->sws = NULL;
584  if (inlink0->w == outlink->w &&
585  inlink0->h == outlink->h &&
586  in_range == outlink->color_range &&
587  in_colorspace == outlink->colorspace &&
588  inlink0->format == outlink->format)
589  ;
590  else {
591  struct SwsContext **swscs[3] = {&scale->sws, &scale->isws[0], &scale->isws[1]};
592  int i;
593 
594  for (i = 0; i < 3; i++) {
595  int in_v_chr_pos = scale->in_v_chr_pos, out_v_chr_pos = scale->out_v_chr_pos;
596  int in_full, out_full, brightness, contrast, saturation;
597  const int *inv_table, *table;
598  struct SwsContext *const s = sws_alloc_context();
599  if (!s)
600  return AVERROR(ENOMEM);
601  *swscs[i] = s;
602 
603  ret = av_opt_copy(s, scale->sws_opts);
604  if (ret < 0)
605  return ret;
606 
607  av_opt_set_int(s, "srcw", inlink0 ->w, 0);
608  av_opt_set_int(s, "srch", inlink0 ->h >> !!i, 0);
609  av_opt_set_int(s, "src_format", inlink0->format, 0);
610  av_opt_set_int(s, "dstw", outlink->w, 0);
611  av_opt_set_int(s, "dsth", outlink->h >> !!i, 0);
612  av_opt_set_int(s, "dst_format", outfmt, 0);
613  if (in_range != AVCOL_RANGE_UNSPECIFIED)
614  av_opt_set_int(s, "src_range",
615  in_range == AVCOL_RANGE_JPEG, 0);
616  if (outlink->color_range != AVCOL_RANGE_UNSPECIFIED)
617  av_opt_set_int(s, "dst_range",
618  outlink->color_range == AVCOL_RANGE_JPEG, 0);
619 
620  /* Override chroma location default settings to have the correct
621  * chroma positions. MPEG chroma positions are used by convention.
622  * Note that this works for both MPEG-1/JPEG and MPEG-2/4 chroma
623  * locations, since they share a vertical alignment */
624  if (desc->log2_chroma_h == 1 && scale->in_v_chr_pos == -513) {
625  in_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
626  }
627 
628  if (outdesc->log2_chroma_h == 1 && scale->out_v_chr_pos == -513) {
629  out_v_chr_pos = (i == 0) ? 128 : (i == 1) ? 64 : 192;
630  }
631 
632  av_opt_set_int(s, "src_h_chr_pos", scale->in_h_chr_pos, 0);
633  av_opt_set_int(s, "src_v_chr_pos", in_v_chr_pos, 0);
634  av_opt_set_int(s, "dst_h_chr_pos", scale->out_h_chr_pos, 0);
635  av_opt_set_int(s, "dst_v_chr_pos", out_v_chr_pos, 0);
636 
637  if ((ret = sws_init_context(s, NULL, NULL)) < 0)
638  return ret;
639 
640  sws_getColorspaceDetails(s, (int **)&inv_table, &in_full,
641  (int **)&table, &out_full,
642  &brightness, &contrast, &saturation);
643 
644  if (scale->in_color_matrix == -1 /* auto */)
645  inv_table = sws_getCoefficients(inlink0->colorspace);
646  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
647  inv_table = sws_getCoefficients(scale->in_color_matrix);
648  if (outlink->colorspace != AVCOL_SPC_UNSPECIFIED)
649  table = sws_getCoefficients(outlink->colorspace);
650  else if (scale->in_color_matrix != AVCOL_SPC_UNSPECIFIED)
651  table = inv_table;
652 
653  sws_setColorspaceDetails(s, inv_table, in_full,
654  table, out_full,
655  brightness, contrast, saturation);
656 
657  if (!scale->interlaced)
658  break;
659  }
660  }
661 
662  if (inlink0->sample_aspect_ratio.num){
663  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink0->w, outlink->w * inlink0->h}, inlink0->sample_aspect_ratio);
664  } else
665  outlink->sample_aspect_ratio = inlink0->sample_aspect_ratio;
666 
667  if (scale->sws)
668  av_opt_get(scale->sws, "sws_flags", 0, &flags_val);
669 
670  av_log(ctx, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d -> w:%d h:%d fmt:%s csp:%s range:%s sar:%d/%d flags:%s\n",
671  inlink ->w, inlink ->h, av_get_pix_fmt_name( inlink->format),
672  av_color_space_name(inlink->colorspace), av_color_range_name(inlink->color_range),
673  inlink->sample_aspect_ratio.num, inlink->sample_aspect_ratio.den,
674  outlink->w, outlink->h, av_get_pix_fmt_name(outlink->format),
675  av_color_space_name(outlink->colorspace), av_color_range_name(outlink->color_range),
676  outlink->sample_aspect_ratio.num, outlink->sample_aspect_ratio.den,
677  flags_val);
678  av_freep(&flags_val);
679 
680  return 0;
681 
682 fail:
683  return ret;
684 }
685 
686 static int config_props_ref(AVFilterLink *outlink)
687 {
688  AVFilterLink *inlink = outlink->src->inputs[1];
689 
690  outlink->w = inlink->w;
691  outlink->h = inlink->h;
692  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
693  outlink->time_base = inlink->time_base;
694  outlink->frame_rate = inlink->frame_rate;
695  outlink->colorspace = inlink->colorspace;
696  outlink->color_range = inlink->color_range;
697 
698  return 0;
699 }
700 
701 static int request_frame(AVFilterLink *outlink)
702 {
703  return ff_request_frame(outlink->src->inputs[0]);
704 }
705 
706 static int request_frame_ref(AVFilterLink *outlink)
707 {
708  return ff_request_frame(outlink->src->inputs[1]);
709 }
710 
711 static void frame_offset(AVFrame *frame, int dir, int is_pal)
712 {
713  for (int i = 0; i < 4 && frame->data[i]; i++) {
714  if (i == 1 && is_pal)
715  break;
716  frame->data[i] += frame->linesize[i] * dir;
717  }
718 }
719 
720 static int scale_field(ScaleContext *scale, AVFrame *dst, AVFrame *src,
721  int field)
722 {
723  int orig_h_src = src->height;
724  int orig_h_dst = dst->height;
725  int ret;
726 
727  // offset the data pointers for the bottom field
728  if (field) {
729  frame_offset(src, 1, scale->input_is_pal);
730  frame_offset(dst, 1, scale->output_is_pal);
731  }
732 
733  // take every second line
734  for (int i = 0; i < 4; i++) {
735  src->linesize[i] *= 2;
736  dst->linesize[i] *= 2;
737  }
738  src->height /= 2;
739  dst->height /= 2;
740 
741  ret = sws_scale_frame(scale->isws[field], dst, src);
742  if (ret < 0)
743  return ret;
744 
745  // undo the changes we made above
746  for (int i = 0; i < 4; i++) {
747  src->linesize[i] /= 2;
748  dst->linesize[i] /= 2;
749  }
750  src->height = orig_h_src;
751  dst->height = orig_h_dst;
752 
753  if (field) {
754  frame_offset(src, -1, scale->input_is_pal);
755  frame_offset(dst, -1, scale->output_is_pal);
756  }
757 
758  return 0;
759 }
760 
761 static int scale_frame(AVFilterLink *link, AVFrame *in, AVFrame **frame_out)
762 {
763  AVFilterContext *ctx = link->dst;
764  ScaleContext *scale = ctx->priv;
765  AVFilterLink *outlink = ctx->outputs[0];
766  AVFrame *out;
767  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
768  char buf[32];
769  int ret;
770  int frame_changed;
771 
772  *frame_out = NULL;
773  if (in->colorspace == AVCOL_SPC_YCGCO)
774  av_log(link->dst, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
775 
776  frame_changed = in->width != link->w ||
777  in->height != link->h ||
778  in->format != link->format ||
779  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
780  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
781  in->colorspace != link->colorspace ||
782  in->color_range != link->color_range;
783 
784  if (scale->eval_mode == EVAL_MODE_FRAME || frame_changed) {
785  unsigned vars_w[VARS_NB] = { 0 }, vars_h[VARS_NB] = { 0 };
786 
787  av_expr_count_vars(scale->w_pexpr, vars_w, VARS_NB);
788  av_expr_count_vars(scale->h_pexpr, vars_h, VARS_NB);
789 
790  if (scale->eval_mode == EVAL_MODE_FRAME &&
791  !frame_changed &&
792  ctx->filter != &ff_vf_scale2ref &&
793  !(vars_w[VAR_N] || vars_w[VAR_T]
794 #if FF_API_FRAME_PKT
795  || vars_w[VAR_POS]
796 #endif
797  ) &&
798  !(vars_h[VAR_N] || vars_h[VAR_T]
799 #if FF_API_FRAME_PKT
800  || vars_h[VAR_POS]
801 #endif
802  ) &&
803  scale->w && scale->h)
804  goto scale;
805 
806  if (scale->eval_mode == EVAL_MODE_INIT) {
807  snprintf(buf, sizeof(buf) - 1, "%d", scale->w);
808  av_opt_set(scale, "w", buf, 0);
809  snprintf(buf, sizeof(buf) - 1, "%d", scale->h);
810  av_opt_set(scale, "h", buf, 0);
811 
812  ret = scale_parse_expr(ctx, NULL, &scale->w_pexpr, "width", scale->w_expr);
813  if (ret < 0)
814  return ret;
815 
816  ret = scale_parse_expr(ctx, NULL, &scale->h_pexpr, "height", scale->h_expr);
817  if (ret < 0)
818  return ret;
819  }
820 
821  if (ctx->filter == &ff_vf_scale2ref) {
822  scale->var_values[VAR_S2R_MAIN_N] = link->frame_count_out;
823  scale->var_values[VAR_S2R_MAIN_T] = TS2T(in->pts, link->time_base);
824 #if FF_API_FRAME_PKT
825 FF_DISABLE_DEPRECATION_WARNINGS
826  scale->var_values[VAR_S2R_MAIN_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
827 FF_ENABLE_DEPRECATION_WARNINGS
828 #endif
829  } else {
830  scale->var_values[VAR_N] = link->frame_count_out;
831  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
832 #if FF_API_FRAME_PKT
833 FF_DISABLE_DEPRECATION_WARNINGS
834  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
835 FF_ENABLE_DEPRECATION_WARNINGS
836 #endif
837  }
838 
839  link->dst->inputs[0]->format = in->format;
840  link->dst->inputs[0]->w = in->width;
841  link->dst->inputs[0]->h = in->height;
842  link->dst->inputs[0]->colorspace = in->colorspace;
843  link->dst->inputs[0]->color_range = in->color_range;
844 
845  link->dst->inputs[0]->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
846  link->dst->inputs[0]->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
847 
848  if ((ret = config_props(outlink)) < 0)
849  return ret;
850  }
851 
852 scale:
853  if (!scale->sws) {
854  *frame_out = in;
855  return 0;
856  }
857 
858  scale->hsub = desc->log2_chroma_w;
859  scale->vsub = desc->log2_chroma_h;
860 
861  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
862  if (!out) {
863  av_frame_free(&in);
864  return AVERROR(ENOMEM);
865  }
866  *frame_out = out;
867 
868  av_frame_copy_props(out, in);
869  out->width = outlink->w;
870  out->height = outlink->h;
871  out->color_range = outlink->color_range;
872  out->colorspace = outlink->colorspace;
873 
874  if (scale->output_is_pal)
875  avpriv_set_systematic_pal2((uint32_t*)out->data[1], outlink->format == AV_PIX_FMT_PAL8 ? AV_PIX_FMT_BGR8 : outlink->format);
876 
877  av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
878  (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
879  (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
880  INT_MAX);
881 
882  if (scale->interlaced>0 || (scale->interlaced<0 &&
883  (in->flags & AV_FRAME_FLAG_INTERLACED))) {
884  ret = scale_field(scale, out, in, 0);
885  if (ret >= 0)
886  ret = scale_field(scale, out, in, 1);
887  } else {
888  ret = sws_scale_frame(scale->sws, out, in);
889  }
890 
891  av_frame_free(&in);
892  if (ret < 0)
893  av_frame_free(frame_out);
894  return ret;
895 }
896 
897 static int filter_frame(AVFilterLink *link, AVFrame *in)
898 {
899  AVFilterContext *ctx = link->dst;
900  AVFilterLink *outlink = ctx->outputs[0];
901  AVFrame *out;
902  int ret;
903 
904  ret = scale_frame(link, in, &out);
905  if (out)
906  return ff_filter_frame(outlink, out);
907 
908  return ret;
909 }
910 
911 static int filter_frame_ref(AVFilterLink *link, AVFrame *in)
912 {
913  ScaleContext *scale = link->dst->priv;
914  AVFilterLink *outlink = link->dst->outputs[1];
915  int frame_changed;
916 
917  frame_changed = in->width != link->w ||
918  in->height != link->h ||
919  in->format != link->format ||
920  in->sample_aspect_ratio.den != link->sample_aspect_ratio.den ||
921  in->sample_aspect_ratio.num != link->sample_aspect_ratio.num ||
922  in->colorspace != link->colorspace ||
923  in->color_range != link->color_range;
924 
925  if (frame_changed) {
926  link->format = in->format;
927  link->w = in->width;
928  link->h = in->height;
929  link->sample_aspect_ratio.den = in->sample_aspect_ratio.den;
930  link->sample_aspect_ratio.num = in->sample_aspect_ratio.num;
931  link->colorspace = in->colorspace;
932  link->color_range = in->color_range;
933 
934  config_props_ref(outlink);
935  }
936 
937  if (scale->eval_mode == EVAL_MODE_FRAME) {
938  scale->var_values[VAR_N] = link->frame_count_out;
939  scale->var_values[VAR_T] = TS2T(in->pts, link->time_base);
940 #if FF_API_FRAME_PKT
941 FF_DISABLE_DEPRECATION_WARNINGS
942  scale->var_values[VAR_POS] = in->pkt_pos == -1 ? NAN : in->pkt_pos;
943 FF_ENABLE_DEPRECATION_WARNINGS
944 #endif
945  }
946 
947  return ff_filter_frame(outlink, in);
948 }
949 
950 static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
951  char *res, int res_len, int flags)
952 {
953  ScaleContext *scale = ctx->priv;
954  char *str_expr;
955  AVExpr **pexpr_ptr;
956  int ret, w, h;
957 
958  w = !strcmp(cmd, "width") || !strcmp(cmd, "w");
959  h = !strcmp(cmd, "height") || !strcmp(cmd, "h");
960 
961  if (w || h) {
962  str_expr = w ? scale->w_expr : scale->h_expr;
963  pexpr_ptr = w ? &scale->w_pexpr : &scale->h_pexpr;
964 
965  ret = scale_parse_expr(ctx, str_expr, pexpr_ptr, cmd, args);
966  } else
967  ret = AVERROR(ENOSYS);
968 
969  if (ret < 0)
970  av_log(ctx, AV_LOG_ERROR, "Failed to process command. Continuing with existing parameters.\n");
971 
972  return ret;
973 }
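/* Note: only "w"/"width" and "h"/"height" are accepted as runtime commands
 * (the options marked TFLAGS below); any other command returns
 * AVERROR(ENOSYS) and the previous parameters stay in effect. */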
974 
975 static const AVClass *child_class_iterate(void **iter)
976 {
977  const AVClass *c = *iter ? NULL : sws_get_class();
978  *iter = (void*)(uintptr_t)c;
979  return c;
980 }
981 
982 static void *child_next(void *obj, void *prev)
983 {
984  ScaleContext *s = obj;
985  if (!prev)
986  return s->sws_opts;
987  return NULL;
988 }
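/* child_class_iterate()/child_next() expose the embedded sws_opts context
 * as an AVOptions child, so scaler options that the filter itself does not
 * define can be set on the filter and are forwarded to libswscale. */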
989 
990 #define OFFSET(x) offsetof(ScaleContext, x)
991 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
992 #define TFLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
993 
994 static const AVOption scale_options[] = {
995  { "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
996  { "width", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
997  { "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
998  { "height","Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, .flags = TFLAGS },
999  { "flags", "Flags to pass to libswscale", OFFSET(flags_str), AV_OPT_TYPE_STRING, { .str = "" }, .flags = FLAGS },
1000  { "interl", "set interlacing", OFFSET(interlaced), AV_OPT_TYPE_BOOL, {.i64 = 0 }, -1, 1, FLAGS },
1001  { "size", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1002  { "s", "set video size", OFFSET(size_str), AV_OPT_TYPE_STRING, {.str = NULL}, 0, .flags = FLAGS },
1003  { "in_color_matrix", "set input YCbCr type", OFFSET(in_color_matrix), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color" },
1004  { "out_color_matrix", "set output YCbCr type", OFFSET(out_color_matrix), AV_OPT_TYPE_INT, { .i64 = AVCOL_SPC_UNSPECIFIED }, 0, AVCOL_SPC_NB-1, .flags = FLAGS, .unit = "color"},
1005  { "auto", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = -1 }, 0, 0, FLAGS, .unit = "color" },
1006  { "bt601", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1007  { "bt470", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1008  { "smpte170m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT470BG }, 0, 0, FLAGS, .unit = "color" },
1009  { "bt709", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT709 }, 0, 0, FLAGS, .unit = "color" },
1010  { "fcc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_FCC }, 0, 0, FLAGS, .unit = "color" },
1011  { "smpte240m", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_SMPTE240M }, 0, 0, FLAGS, .unit = "color" },
1012  { "bt2020", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = AVCOL_SPC_BT2020_NCL }, 0, 0, FLAGS, .unit = "color" },
1013  { "in_range", "set input color range", OFFSET( in_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1014  { "out_range", "set output color range", OFFSET(out_range), AV_OPT_TYPE_INT, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 2, FLAGS, .unit = "range" },
1015  { "auto", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1016  { "unknown", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_UNSPECIFIED }, 0, 0, FLAGS, .unit = "range" },
1017  { "full", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1018  { "limited",NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1019  { "jpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1020  { "mpeg", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1021  { "tv", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_MPEG}, 0, 0, FLAGS, .unit = "range" },
1022  { "pc", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = AVCOL_RANGE_JPEG}, 0, 0, FLAGS, .unit = "range" },
1023  { "in_v_chr_pos", "input vertical chroma position in luma grid/256" , OFFSET(in_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1024  { "in_h_chr_pos", "input horizontal chroma position in luma grid/256", OFFSET(in_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1025  { "out_v_chr_pos", "output vertical chroma position in luma grid/256" , OFFSET(out_v_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1026  { "out_h_chr_pos", "output horizontal chroma position in luma grid/256", OFFSET(out_h_chr_pos), AV_OPT_TYPE_INT, { .i64 = -513}, -513, 512, FLAGS },
1027  { "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, .unit = "force_oar" },
1028  { "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, .unit = "force_oar" },
1029  { "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, .unit = "force_oar" },
1030  { "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, .unit = "force_oar" },
1031  { "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
1032  { "param0", "Scaler param 0", OFFSET(param[0]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1033  { "param1", "Scaler param 1", OFFSET(param[1]), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX }, -DBL_MAX, DBL_MAX, FLAGS },
1034  { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_INIT}, 0, EVAL_MODE_NB-1, FLAGS, .unit = "eval" },
1035  { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
1036  { "frame", "eval expressions during initialization and per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
1037  { NULL }
1038 };
1039 
1040 static const AVClass scale_class = {
1041  .class_name = "scale(2ref)",
1042  .item_name = av_default_item_name,
1043  .option = scale_options,
1044  .version = LIBAVUTIL_VERSION_INT,
1045  .category = AV_CLASS_CATEGORY_FILTER,
1046  .child_class_iterate = child_class_iterate,
1047  .child_next = child_next,
1048 };
1049 
1050 static const AVFilterPad avfilter_vf_scale_inputs[] = {
1051  {
1052  .name = "default",
1053  .type = AVMEDIA_TYPE_VIDEO,
1054  .filter_frame = filter_frame,
1055  },
1056 };
1057 
1058 static const AVFilterPad avfilter_vf_scale_outputs[] = {
1059  {
1060  .name = "default",
1061  .type = AVMEDIA_TYPE_VIDEO,
1062  .config_props = config_props,
1063  },
1064 };
1065 
1066 const AVFilter ff_vf_scale = {
1067  .name = "scale",
1068  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format."),
1069  .preinit = preinit,
1070  .init = init,
1071  .uninit = uninit,
1072  .priv_size = sizeof(ScaleContext),
1073  .priv_class = &scale_class,
1074  FILTER_INPUTS(avfilter_vf_scale_inputs),
1075  FILTER_OUTPUTS(avfilter_vf_scale_outputs),
1076  FILTER_QUERY_FUNC(query_formats),
1077  .process_command = process_command,
1078 };
1079 
1080 static const AVFilterPad avfilter_vf_scale2ref_inputs[] = {
1081  {
1082  .name = "default",
1083  .type = AVMEDIA_TYPE_VIDEO,
1084  .filter_frame = filter_frame,
1085  },
1086  {
1087  .name = "ref",
1088  .type = AVMEDIA_TYPE_VIDEO,
1089  .filter_frame = filter_frame_ref,
1090  },
1091 };
1092 
1093 static const AVFilterPad avfilter_vf_scale2ref_outputs[] = {
1094  {
1095  .name = "default",
1096  .type = AVMEDIA_TYPE_VIDEO,
1097  .config_props = config_props,
1098  .request_frame= request_frame,
1099  },
1100  {
1101  .name = "ref",
1102  .type = AVMEDIA_TYPE_VIDEO,
1103  .config_props = config_props_ref,
1104  .request_frame= request_frame_ref,
1105  },
1106 };
1107 
1108 const AVFilter ff_vf_scale2ref = {
1109  .name = "scale2ref",
1110  .description = NULL_IF_CONFIG_SMALL("Scale the input video size and/or convert the image format to the given reference."),
1111  .preinit = preinit,
1112  .init = init,
1113  .uninit = uninit,
1114  .priv_size = sizeof(ScaleContext),
1115  .priv_class = &scale_class,
1116  FILTER_INPUTS(avfilter_vf_scale2ref_inputs),
1117  FILTER_OUTPUTS(avfilter_vf_scale2ref_outputs),
1118  FILTER_QUERY_FUNC(query_formats),
1119  .process_command = process_command,
1120 };