FFmpeg
vf_psnr.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2011 Roger Pau Monné <roger.pau@entel.upc.edu>
3  * Copyright (c) 2011 Stefano Sabatini
4  * Copyright (c) 2013 Paul B Mahol
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * Calculate the PSNR between two input videos.
26  */
27 
28 #include "libavutil/avstring.h"
29 #include "libavutil/file_open.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/pixdesc.h"
32 #include "avfilter.h"
33 #include "drawutils.h"
34 #include "framesync.h"
35 #include "internal.h"
36 #include "psnr.h"
37 
/*
 * Filter private context for the PSNR filter.
 * NOTE(review): this extraction dropped several members (original source
 * lines 44-47, 52-53 and 58: stats_file_str, stats_version,
 * stats_header_written, stats_add_max, nb_components, nb_threads, dsp and
 * fs, per the cross-reference index) -- confirm against the real source.
 */
38 typedef struct PSNRContext {
39  const AVClass *class; /* AVOptions: must be the first member */
41  double mse, min_mse, max_mse, mse_comp[4]; /* running MSE totals: overall, per-frame extremes, per component */
42  uint64_t nb_frames; /* number of frame pairs compared so far */
43  FILE *stats_file; /* per-frame stats log (may be stdout); NULL when unused */
48  int max[4], average_max; /* per-component max sample value and its area-weighted average */
49  int is_rgb; /* non-zero when the input pixel format is RGB-based */
50  uint8_t rgba_map[4]; /* logical component -> plane index remap for RGB formats */
51  char comps[4]; /* component letters used in logs: 'y','u','v','a' or 'r','g','b','a' */
54  int planewidth[4]; /* per-plane width in samples */
55  int planeheight[4]; /* per-plane height in samples */
56  double planeweight[4]; /* plane area / total area, used to average component MSEs */
57  uint64_t **score; /* [thread][component] SSE accumulators */
59 } PSNRContext;
60 
61 #define OFFSET(x) offsetof(PSNRContext, x)
62 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
63 
/* User-visible options; "stats_file" and "f" are aliases for the same field. */
64 static const AVOption psnr_options[] = {
65  {"stats_file", "Set file where to store per-frame difference information", OFFSET(stats_file_str), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
66  {"f", "Set file where to store per-frame difference information", OFFSET(stats_file_str), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
67  {"stats_version", "Set the format version for the stats file.", OFFSET(stats_version), AV_OPT_TYPE_INT, {.i64=1}, 1, 2, FLAGS },
68  {"output_max", "Add raw stats (max values) to the output log.", OFFSET(stats_add_max), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS},
69  { NULL }
70 };
71 
73 
/**
 * Square an unsigned value.
 *
 * @param base value to square (wraps modulo 2^32 on overflow, as unsigned
 *             arithmetic does)
 * @return base squared
 */
static inline unsigned pow_2(unsigned base)
{
    const unsigned b = base;
    return b * b;
}
78 
79 static inline double get_psnr(double mse, uint64_t nb_frames, int max)
80 {
81  return 10.0 * log10(pow_2(max) / (mse / nb_frames));
82 }
83 
/**
 * Sum of squared differences of one row of 8-bit samples.
 *
 * @param main_line row from the frame under test
 * @param ref_line  row from the reference frame
 * @param outw      number of samples in the row
 * @return sum over the row of (main - ref)^2
 */
static uint64_t sse_line_8bit(const uint8_t *main_line, const uint8_t *ref_line, int outw)
{
    unsigned acc = 0;

    for (int i = 0; i < outw; i++) {
        /* |d| <= 255, so d*d fits comfortably in int/unsigned */
        const int d = main_line[i] - ref_line[i];
        acc += (unsigned)(d * d);
    }

    return acc;
}
94 
/**
 * Sum of squared differences of one row of 16-bit samples.
 *
 * The byte pointers are reinterpreted as uint16_t rows (matching how the
 * caller stores high-bit-depth planes).
 *
 * @param _main_line row from the frame under test (uint16_t data)
 * @param _ref_line  row from the reference frame (uint16_t data)
 * @param outw       number of samples in the row
 * @return sum over the row of (main - ref)^2
 */
static uint64_t sse_line_16bit(const uint8_t *_main_line, const uint8_t *_ref_line, int outw)
{
    const uint16_t *mline = (const uint16_t *) _main_line;
    const uint16_t *rline = (const uint16_t *) _ref_line;
    uint64_t acc = 0;

    for (int i = 0; i < outw; i++) {
        /* |d| <= 65535; square in 64-bit to stay exact before accumulating */
        const int64_t d = (int)mline[i] - (int)rline[i];
        acc += (uint64_t)(d * d);
    }

    return acc;
}
107 
/*
 * Per-job parameters handed to the slice worker by do_psnr().
 * NOTE(review): the extraction dropped original source line 117 here
 * (PSNRDSPContext *dsp, per the cross-reference index) -- confirm against
 * the real source.
 */
108 typedef struct ThreadData {
109  const uint8_t *main_data[4]; /* plane pointers of the frame under test */
110  const uint8_t *ref_data[4]; /* plane pointers of the reference frame */
111  int main_linesize[4];
112  int ref_linesize[4];
113  int planewidth[4];
114  int planeheight[4];
115  uint64_t **score; /* [job][component] SSE output slots */
116  int nb_components;
118 } ThreadData;
119 
/*
 * Slice worker: accumulate the sum of squared errors for one horizontal
 * band of every plane; results are written to td->score[jobnr][component].
 * NOTE(review): original source line 121 was lost in extraction; the full
 * signature per the cross-reference index is
 *   static int compute_images_mse(AVFilterContext *ctx, void *arg,
 *                                 int jobnr, int nb_jobs)
 * -- confirm against the real source.
 */
120 static
122  int jobnr, int nb_jobs)
123 {
124  ThreadData *td = arg;
125  uint64_t *score = td->score[jobnr]; /* this job's per-component output */
126 
127  for (int c = 0; c < td->nb_components; c++) {
128  const int outw = td->planewidth[c];
129  const int outh = td->planeheight[c];
  /* rows [slice_start, slice_end) of plane c belong to this job */
130  const int slice_start = (outh * jobnr) / nb_jobs;
131  const int slice_end = (outh * (jobnr+1)) / nb_jobs;
132  const int ref_linesize = td->ref_linesize[c];
133  const int main_linesize = td->main_linesize[c];
134  const uint8_t *main_line = td->main_data[c] + main_linesize * slice_start;
135  const uint8_t *ref_line = td->ref_data[c] + ref_linesize * slice_start;
136  uint64_t m = 0;
137  for (int i = slice_start; i < slice_end; i++) {
  /* sse_line is the 8- or 16-bit row routine chosen at config time */
138  m += td->dsp->sse_line(main_line, ref_line, outw);
139  ref_line += ref_linesize;
140  main_line += main_linesize;
141  }
142  score[c] = m;
143  }
144 
145  return 0;
146 }
147 
148 static void set_meta(AVDictionary **metadata, const char *key, char comp, float d)
149 {
150  char value[128];
151  snprintf(value, sizeof(value), "%f", d);
152  if (comp) {
153  char key2[128];
154  snprintf(key2, sizeof(key2), "%s%c", key, comp);
155  av_dict_set(metadata, key2, value, 0);
156  } else {
157  av_dict_set(metadata, key, value, 0);
158  }
159 }
160 
/*
 * Framesync callback: runs once per matched (main, reference) frame pair.
 * Computes per-component and average MSE/PSNR, attaches them as frame
 * metadata, optionally appends a line to the stats file, and forwards the
 * main frame downstream.
 * NOTE(review): original source lines 172 and 198 were lost in extraction;
 * per the cross-reference index they are the
 *   ff_framesync_dualinput_get(fs, &master, &ref)
 * call that fills master/ref/ret, and the ff_filter_execute() call that
 * runs compute_images_mse over the slices -- confirm against the real
 * source.
 */
161 static int do_psnr(FFFrameSync *fs)
162 {
163  AVFilterContext *ctx = fs->parent;
164  PSNRContext *s = ctx->priv;
165  AVFrame *master, *ref;
166  double comp_mse[4], mse = 0.;
167  uint64_t comp_sum[4] = { 0 };
168  AVDictionary **metadata;
169  ThreadData td;
170  int ret;
171 
173  if (ret < 0)
174  return ret;
  /* timeline-disabled or no reference frame yet: pass the main frame through */
175  if (ctx->is_disabled || !ref)
176  return ff_filter_frame(ctx->outputs[0], master);
177  metadata = &master->metadata;
178 
179  td.nb_components = s->nb_components;
180  td.dsp = &s->dsp;
181  td.score = s->score;
182  for (int c = 0; c < s->nb_components; c++) {
183  td.main_data[c] = master->data[c];
184  td.ref_data[c] = ref->data[c];
185  td.main_linesize[c] = master->linesize[c];
186  td.ref_linesize[c] = ref->linesize[c];
187  td.planewidth[c] = s->planewidth[c];
188  td.planeheight[c] = s->planeheight[c];
189  }
190 
191  if (master->color_range != ref->color_range) {
192  av_log(ctx, AV_LOG_WARNING, "master and reference "
193  "frames use different color ranges (%s != %s)\n",
194  av_color_range_name(master->color_range),
195  av_color_range_name(ref->color_range));
196  }
197 
199  FFMIN(s->planeheight[1], s->nb_threads));
200 
  /* reduce the per-thread partial SSE sums into per-component totals */
201  for (int j = 0; j < s->nb_threads; j++) {
202  for (int c = 0; c < s->nb_components; c++)
203  comp_sum[c] += s->score[j][c];
204  }
205 
206  for (int c = 0; c < s->nb_components; c++)
207  comp_mse[c] = comp_sum[c] / ((double)s->planewidth[c] * s->planeheight[c]);
208 
  /* frame MSE = area-weighted average of the component MSEs */
209  for (int c = 0; c < s->nb_components; c++)
210  mse += comp_mse[c] * s->planeweight[c];
211 
212  s->min_mse = FFMIN(s->min_mse, mse);
213  s->max_mse = FFMAX(s->max_mse, mse);
214 
215  s->mse += mse;
216 
217  for (int j = 0; j < s->nb_components; j++)
218  s->mse_comp[j] += comp_mse[j];
219  s->nb_frames++;
220 
  /* export per-component and average values as frame metadata */
221  for (int j = 0; j < s->nb_components; j++) {
222  int c = s->is_rgb ? s->rgba_map[j] : j;
223  set_meta(metadata, "lavfi.psnr.mse.", s->comps[j], comp_mse[c]);
224  set_meta(metadata, "lavfi.psnr.psnr.", s->comps[j], get_psnr(comp_mse[c], 1, s->max[c]));
225  }
226  set_meta(metadata, "lavfi.psnr.mse_avg", 0, mse);
227  set_meta(metadata, "lavfi.psnr.psnr_avg", 0, get_psnr(mse, 1, s->average_max));
228 
229  if (s->stats_file) {
  /* version-2 logs start with a one-time header describing the fields */
230  if (s->stats_version == 2 && !s->stats_header_written) {
231  fprintf(s->stats_file, "psnr_log_version:2 fields:n");
232  fprintf(s->stats_file, ",mse_avg");
233  for (int j = 0; j < s->nb_components; j++) {
234  fprintf(s->stats_file, ",mse_%c", s->comps[j]);
235  }
236  fprintf(s->stats_file, ",psnr_avg");
237  for (int j = 0; j < s->nb_components; j++) {
238  fprintf(s->stats_file, ",psnr_%c", s->comps[j]);
239  }
240  if (s->stats_add_max) {
241  fprintf(s->stats_file, ",max_avg");
242  for (int j = 0; j < s->nb_components; j++) {
243  fprintf(s->stats_file, ",max_%c", s->comps[j]);
244  }
245  }
246  fprintf(s->stats_file, "\n");
247  s->stats_header_written = 1;
248  }
249  fprintf(s->stats_file, "n:%"PRId64" mse_avg:%0.2f ", s->nb_frames, mse);
250  for (int j = 0; j < s->nb_components; j++) {
251  int c = s->is_rgb ? s->rgba_map[j] : j;
252  fprintf(s->stats_file, "mse_%c:%0.2f ", s->comps[j], comp_mse[c]);
253  }
254  fprintf(s->stats_file, "psnr_avg:%0.2f ", get_psnr(mse, 1, s->average_max));
255  for (int j = 0; j < s->nb_components; j++) {
256  int c = s->is_rgb ? s->rgba_map[j] : j;
257  fprintf(s->stats_file, "psnr_%c:%0.2f ", s->comps[j],
258  get_psnr(comp_mse[c], 1, s->max[c]));
259  }
260  if (s->stats_version == 2 && s->stats_add_max) {
261  fprintf(s->stats_file, "max_avg:%d ", s->average_max);
262  for (int j = 0; j < s->nb_components; j++) {
263  int c = s->is_rgb ? s->rgba_map[j] : j;
264  fprintf(s->stats_file, "max_%c:%d ", s->comps[j], s->max[c]);
265  }
266  }
267  fprintf(s->stats_file, "\n");
268  }
269 
270  return ff_filter_frame(ctx->outputs[0], master);
271 }
272 
/*
 * Filter init: seed the running MSE extremes and open the stats file.
 * NOTE(review): the signature line (original source line 273,
 * "static av_cold int init(AVFilterContext *ctx)" per the cross-reference
 * index) and line 282 (the start of the av_log(ctx, AV_LOG_ERROR, ...)
 * call) were lost in extraction -- confirm against the real source.
 */
274 {
275  PSNRContext *s = ctx->priv;
276 
  /* so the very first frame always becomes both the min and the max */
277  s->min_mse = +INFINITY;
278  s->max_mse = -INFINITY;
279 
280  if (s->stats_file_str) {
  /* max columns only exist in the version-2 log format */
281  if (s->stats_version < 2 && s->stats_add_max) {
283  "stats_add_max was specified but stats_version < 2.\n" );
284  return AVERROR(EINVAL);
285  }
  /* "-" means write the per-frame stats to stdout */
286  if (!strcmp(s->stats_file_str, "-")) {
287  s->stats_file = stdout;
288  } else {
289  s->stats_file = avpriv_fopen_utf8(s->stats_file_str, "w");
290  if (!s->stats_file) {
291  int err = AVERROR(errno);
292  char buf[128];
293  av_strerror(err, buf, sizeof(buf));
294  av_log(ctx, AV_LOG_ERROR, "Could not open stats file %s: %s\n",
295  s->stats_file_str, buf);
296  return err;
297  }
298  }
299  }
300 
  /* framesync invokes do_psnr for every matched frame pair */
301  s->fs.on_event = do_psnr;
302  return 0;
303 }
304 
/*
 * Supported input pixel formats (planar YUV/YUVA families at various bit
 * depths; the PF* macros expand the 4:2:0/4:2:2/4:4:4 variants).
 * NOTE(review): extraction dropped original source lines 306 and 311-317
 * (further entries such as the gray/GBR formats and the terminating
 * AV_PIX_FMT_NONE, per the cross-reference index) -- confirm against the
 * real source.
 */
305 static const enum AVPixelFormat pix_fmts[] = {
307 #define PF_NOALPHA(suf) AV_PIX_FMT_YUV420##suf, AV_PIX_FMT_YUV422##suf, AV_PIX_FMT_YUV444##suf
308 #define PF_ALPHA(suf) AV_PIX_FMT_YUVA420##suf, AV_PIX_FMT_YUVA422##suf, AV_PIX_FMT_YUVA444##suf
309 #define PF(suf) PF_NOALPHA(suf), PF_ALPHA(suf)
310  PF(P), PF(P9), PF(P10), PF_NOALPHA(P12), PF_NOALPHA(P14), PF(P16),
318 };
319 
/*
 * Configure the reference input: validate that both inputs have the same
 * dimensions, derive per-plane geometry and weights, pick the SSE row
 * routine, and allocate the per-thread score buffers.
 * NOTE(review): extraction lost original source lines 320 and 322; per the
 * cross-reference index they are the signature
 *   static int config_input_ref(AVFilterLink *inlink)
 * and
 *   const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
 * -- confirm against the real source.
 */
321 {
323  AVFilterContext *ctx = inlink->dst;
324  PSNRContext *s = ctx->priv;
325  double average_max;
326  unsigned sum;
327  int j;
328 
329  s->nb_threads = ff_filter_get_nb_threads(ctx);
330  s->nb_components = desc->nb_components;
331  if (ctx->inputs[0]->w != ctx->inputs[1]->w ||
332  ctx->inputs[0]->h != ctx->inputs[1]->h) {
333  av_log(ctx, AV_LOG_ERROR, "Width and height of input videos must be same.\n");
334  return AVERROR(EINVAL);
335  }
336 
  /* max representable sample value per component, from its bit depth */
337  s->max[0] = (1 << desc->comp[0].depth) - 1;
338  s->max[1] = (1 << desc->comp[1].depth) - 1;
339  s->max[2] = (1 << desc->comp[2].depth) - 1;
340  s->max[3] = (1 << desc->comp[3].depth) - 1;
341 
342  s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
343  s->comps[0] = s->is_rgb ? 'r' : 'y' ;
344  s->comps[1] = s->is_rgb ? 'g' : 'u' ;
345  s->comps[2] = s->is_rgb ? 'b' : 'v' ;
346  s->comps[3] = 'a';
347 
  /* chroma planes may be subsampled; luma/alpha use the full frame size */
348  s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
349  s->planeheight[0] = s->planeheight[3] = inlink->h;
350  s->planewidth[1] = s->planewidth[2] = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
351  s->planewidth[0] = s->planewidth[3] = inlink->w;
  /* weight each component by its share of the total sample count */
352  sum = 0;
353  for (j = 0; j < s->nb_components; j++)
354  sum += s->planeheight[j] * s->planewidth[j];
355  average_max = 0;
356  for (j = 0; j < s->nb_components; j++) {
357  s->planeweight[j] = (double) s->planeheight[j] * s->planewidth[j] / sum;
358  average_max += s->max[j] * s->planeweight[j];
359  }
360  s->average_max = lrint(average_max);
361 
362  s->dsp.sse_line = desc->comp[0].depth > 8 ? sse_line_16bit : sse_line_8bit;
363 #if ARCH_X86
364  ff_psnr_init_x86(&s->dsp, desc->comp[0].depth);
365 #endif
366 
367  s->score = av_calloc(s->nb_threads, sizeof(*s->score));
368  if (!s->score)
369  return AVERROR(ENOMEM);
370 
371  for (int t = 0; t < s->nb_threads; t++) {
372  s->score[t] = av_calloc(s->nb_components, sizeof(*s->score[0]));
373  if (!s->score[t])
374  return AVERROR(ENOMEM);
375  }
376 
377  return 0;
378 }
379 
/*
 * Configure the output link: copy geometry and timing from the main input,
 * set up framesync, and warn when the two inputs disagree on time base.
 * NOTE(review): extraction lost original source line 387; per the
 * cross-reference index it is
 *   ret = ff_framesync_init_dualinput(&s->fs, ctx);
 * -- confirm against the real source.
 */
380 static int config_output(AVFilterLink *outlink)
381 {
382  AVFilterContext *ctx = outlink->src;
383  PSNRContext *s = ctx->priv;
384  AVFilterLink *mainlink = ctx->inputs[0];
385  int ret;
386 
388  if (ret < 0)
389  return ret;
390  outlink->w = mainlink->w;
391  outlink->h = mainlink->h;
392  outlink->time_base = mainlink->time_base;
393  outlink->sample_aspect_ratio = mainlink->sample_aspect_ratio;
394  outlink->frame_rate = mainlink->frame_rate;
395  if ((ret = ff_framesync_configure(&s->fs)) < 0)
396  return ret;
397 
  /* framesync may have chosen a different (common) time base */
398  outlink->time_base = s->fs.time_base;
399 
400  if (av_cmp_q(mainlink->time_base, outlink->time_base) ||
401  av_cmp_q(ctx->inputs[1]->time_base, outlink->time_base))
402  av_log(ctx, AV_LOG_WARNING, "not matching timebases found between first input: %d/%d and second input %d/%d, results may be incorrect!\n",
403  mainlink->time_base.num, mainlink->time_base.den,
404  ctx->inputs[1]->time_base.num, ctx->inputs[1]->time_base.den);
405 
406  return 0;
407 }
408 
/*
 * Activate callback: delegate all frame scheduling to framesync.
 * NOTE(review): the signature line (original source line 409,
 * "static int activate(AVFilterContext *ctx)" per the cross-reference
 * index) was lost in extraction -- confirm against the real source.
 */
410 {
411  PSNRContext *s = ctx->priv;
412  return ff_framesync_activate(&s->fs);
413 }
414 
/*
 * Teardown: log the aggregate PSNR summary, release framesync state and
 * the per-thread score buffers, and close the stats file (unless stdout).
 * NOTE(review): the signature line (original source line 415,
 * "static av_cold void uninit(AVFilterContext *ctx)" per the
 * cross-reference index) was lost in extraction -- confirm against the
 * real source.
 */
416 {
417  PSNRContext *s = ctx->priv;
418 
419  if (s->nb_frames > 0) {
420  int j;
421  char buf[256];
422 
423  buf[0] = 0;
424  for (j = 0; j < s->nb_components; j++) {
425  int c = s->is_rgb ? s->rgba_map[j] : j;
426  av_strlcatf(buf, sizeof(buf), " %c:%f", s->comps[j],
427  get_psnr(s->mse_comp[c], s->nb_frames, s->max[c]));
428  }
  /* min PSNR corresponds to the max-MSE frame, and vice versa */
429  av_log(ctx, AV_LOG_INFO, "PSNR%s average:%f min:%f max:%f\n",
430  buf,
431  get_psnr(s->mse, s->nb_frames, s->average_max),
432  get_psnr(s->max_mse, 1, s->average_max),
433  get_psnr(s->min_mse, 1, s->average_max));
434  }
435 
436  ff_framesync_uninit(&s->fs);
  /* score may be NULL if config_input_ref never ran or failed early */
437  for (int t = 0; t < s->nb_threads && s->score; t++)
438  av_freep(&s->score[t]);
439  av_freep(&s->score);
440 
441  if (s->stats_file && s->stats_file != stdout)
442  fclose(s->stats_file);
443 }
444 
/* Two inputs: the frame under test ("main") and the pristine "reference";
 * the reference pad drives plane/threading setup via config_input_ref. */
445 static const AVFilterPad psnr_inputs[] = {
446  {
447  .name = "main",
448  .type = AVMEDIA_TYPE_VIDEO,
449  },{
450  .name = "reference",
451  .type = AVMEDIA_TYPE_VIDEO,
452  .config_props = config_input_ref,
453  },
454 };
455 
/* Single video output: the main frame, annotated with PSNR metadata. */
456 static const AVFilterPad psnr_outputs[] = {
457  {
458  .name = "default",
459  .type = AVMEDIA_TYPE_VIDEO,
460  .config_props = config_output,
461  },
462 };
463 
/*
 * Filter definition registered with libavfilter.
 * NOTE(review): extraction lost original source lines 464 and 473-478
 * (the "const AVFilter ff_vf_psnr = {" opener plus the FILTER_INPUTS/
 * FILTER_OUTPUTS/FILTER_PIXFMTS_ARRAY and .flags members, per the
 * cross-reference index) -- confirm against the real source.
 */
465  .name = "psnr",
466  .description = NULL_IF_CONFIG_SMALL("Calculate the PSNR between two video streams."),
467  .preinit = psnr_framesync_preinit,
468  .init = init,
469  .uninit = uninit,
470  .activate = activate,
471  .priv_size = sizeof(PSNRContext),
472  .priv_class = &psnr_class,
479 };
AV_PIX_FMT_GBRAP16
#define AV_PIX_FMT_GBRAP16
Definition: pixfmt.h:505
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:134
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
PSNRContext::stats_version
int stats_version
Definition: vf_psnr.c:45
config_input_ref
static int config_input_ref(AVFilterLink *inlink)
Definition: vf_psnr.c:320
td
#define td
Definition: regdef.h:70
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
PSNRContext::max_mse
double max_mse
Definition: vf_psnr.c:41
do_psnr
static int do_psnr(FFFrameSync *fs)
Definition: vf_psnr.c:161
INFINITY
#define INFINITY
Definition: mathematics.h:118
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
set_meta
static void set_meta(AVDictionary **metadata, const char *key, char comp, float d)
Definition: vf_psnr.c:148
PSNRContext::max
int max[4]
Definition: vf_psnr.c:48
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:304
PSNRContext
Definition: vf_psnr.c:38
PSNRContext::planewidth
int planewidth[4]
Definition: vf_psnr.c:54
comp
static void comp(unsigned char *dst, ptrdiff_t dst_stride, unsigned char *src, ptrdiff_t src_stride, int add)
Definition: eamad.c:80
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1007
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2968
FILTER_PIXFMTS_ARRAY
#define FILTER_PIXFMTS_ARRAY(array)
Definition: internal.h:162
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
PSNRContext::is_rgb
int is_rgb
Definition: vf_psnr.c:49
PSNRContext::stats_file_str
char * stats_file_str
Definition: vf_psnr.c:44
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:340
pixdesc.h
ThreadData::ref_linesize
int ref_linesize
Definition: vf_bm3d.c:57
AVOption
AVOption.
Definition: opt.h:251
base
uint8_t base
Definition: vp3data.h:128
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:106
max
#define max(a, b)
Definition: cuda_runtime.h:33
AVDictionary
Definition: dict.c:34
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:170
FFFrameSync
Frame sync structure.
Definition: framesync.h:168
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_psnr.c:273
psnr
static double psnr(double d)
Definition: ffmpeg_enc.c:552
av_strlcatf
size_t av_strlcatf(char *dst, size_t size, const char *fmt,...)
Definition: avstring.c:103
ff_psnr_init_x86
void ff_psnr_init_x86(PSNRDSPContext *dsp, int bpp)
Definition: vf_psnr_init.c:28
AV_PIX_FMT_GRAY9
#define AV_PIX_FMT_GRAY9
Definition: pixfmt.h:462
psnr_inputs
static const AVFilterPad psnr_inputs[]
Definition: vf_psnr.c:445
AV_PIX_FMT_GBRP14
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:500
AV_PIX_FMT_GBRAP
@ AV_PIX_FMT_GBRAP
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:212
av_strerror
int av_strerror(int errnum, char *errbuf, size_t errbuf_size)
Put a description of the AVERROR code errnum in errbuf.
Definition: error.c:108
AV_PIX_FMT_GBRP10
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:498
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:466
AVRational::num
int num
Numerator.
Definition: rational.h:59
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:33
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:283
slice_start
static int slice_start(SliceContext *sc, VVCContext *s, VVCFrameContext *fc, const CodedBitstreamUnit *unit, const int is_first_slice)
Definition: vvcdec.c:685
lrint
#define lrint
Definition: tablegen.h:53
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
av_cold
#define av_cold
Definition: attributes.h:90
psnr.h
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:86
AV_PIX_FMT_GBRAP10
#define AV_PIX_FMT_GBRAP10
Definition: pixfmt.h:502
s
#define s(width, name)
Definition: cbs_vp9.c:198
AV_PIX_FMT_GBRAP12
#define AV_PIX_FMT_GBRAP12
Definition: pixfmt.h:503
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:58
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:1717
FRAMESYNC_DEFINE_CLASS
FRAMESYNC_DEFINE_CLASS(psnr, PSNRContext, fs)
PSNRContext::mse
double mse
Definition: vf_psnr.c:41
ctx
AVFormatContext * ctx
Definition: movenc.c:48
AV_PIX_FMT_GRAY14
#define AV_PIX_FMT_GRAY14
Definition: pixfmt.h:465
key
const char * key
Definition: hwcontext_opencl.c:174
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:182
file_open.h
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:87
arg
const char * arg
Definition: jacosubdec.c:67
AV_PIX_FMT_GRAY10
#define AV_PIX_FMT_GRAY10
Definition: pixfmt.h:463
av_color_range_name
const char * av_color_range_name(enum AVColorRange range)
Definition: pixdesc.c:3284
AV_PIX_FMT_GBRP16
#define AV_PIX_FMT_GBRP16
Definition: pixfmt.h:501
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:200
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:85
PSNRContext::comps
char comps[4]
Definition: vf_psnr.c:51
double
double
Definition: af_crystalizer.c:131
ThreadData::planewidth
int planewidth[4]
Definition: vf_identity.c:88
psnr_options
static const AVOption psnr_options[]
Definition: vf_psnr.c:64
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
AV_PIX_FMT_GBRP9
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:497
PSNRDSPContext
Definition: psnr.h:27
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
sse_line_8bit
static uint64_t sse_line_8bit(const uint8_t *main_line, const uint8_t *ref_line, int outw)
Definition: vf_psnr.c:84
PSNRContext::stats_header_written
int stats_header_written
Definition: vf_psnr.c:46
ThreadData::dsp
PSNRDSPContext * dsp
Definition: vf_psnr.c:117
PSNRContext::rgba_map
uint8_t rgba_map[4]
Definition: vf_psnr.c:50
PF_NOALPHA
#define PF_NOALPHA(suf)
sse_line_16bit
static uint64_t sse_line_16bit(const uint8_t *_main_line, const uint8_t *_ref_line, int outw)
Definition: vf_psnr.c:95
psnr_outputs
static const AVFilterPad psnr_outputs[]
Definition: vf_psnr.c:456
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:106
ff_framesync_init_dualinput
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
Initialize a frame sync structure for dualinput.
Definition: framesync.c:375
master
const char * master
Definition: vf_curves.c:129
P
#define P
PSNRContext::nb_components
int nb_components
Definition: vf_psnr.c:52
PSNRContext::fs
FFFrameSync fs
Definition: vf_psnr.c:40
pow_2
static unsigned pow_2(unsigned base)
Definition: vf_psnr.c:74
PSNRContext::planeheight
int planeheight[4]
Definition: vf_psnr.c:55
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_psnr.c:415
PSNRContext::min_mse
double min_mse
Definition: vf_psnr.c:41
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: vf_psnr.c:305
PF
#define PF(suf)
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:191
internal.h
ThreadData::score
uint64_t ** score
Definition: vf_identity.c:90
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:255
AV_PIX_FMT_GBRP12
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:499
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:814
ThreadData
Used for passing data between threads.
Definition: dsddec.c:69
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_psnr.c:380
PSNRContext::mse_comp
double mse_comp[4]
Definition: vf_psnr.c:41
value
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default value
Definition: writing_filters.txt:86
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:107
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:39
avpriv_fopen_utf8
FILE * avpriv_fopen_utf8(const char *path, const char *mode)
Open a file using a UTF-8 filename.
Definition: file_open.c:159
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:262
PSNRContext::planeweight
double planeweight[4]
Definition: vf_psnr.c:56
PSNRContext::average_max
int average_max
Definition: vf_psnr.c:48
PSNRContext::dsp
PSNRDSPContext dsp
Definition: vf_psnr.c:58
PSNRContext::stats_add_max
int stats_add_max
Definition: vf_psnr.c:47
av_cmp_q
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
AVFilter
Filter definition.
Definition: avfilter.h:166
ret
ret
Definition: filter_design.txt:187
PSNRContext::nb_threads
int nb_threads
Definition: vf_psnr.c:53
framesync.h
get_psnr
static double get_psnr(double mse, uint64_t nb_frames, int max)
Definition: vf_psnr.c:79
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:225
avfilter.h
AVFILTER_FLAG_METADATA_ONLY
#define AVFILTER_FLAG_METADATA_ONLY
The filter is a "metadata" filter - it does not modify the frame data in any way.
Definition: avfilter.h:133
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:112
ThreadData::main_linesize
int main_linesize[4]
Definition: vf_identity.c:86
activate
static int activate(AVFilterContext *ctx)
Definition: vf_psnr.c:409
AVFilterContext
An instance of a filter.
Definition: avfilter.h:409
AV_PIX_FMT_GBRP
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:165
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:117
desc
const char * desc
Definition: libsvtav1.c:83
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
ThreadData::nb_components
int nb_components
Definition: vf_identity.c:91
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:244
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:183
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
FLAGS
#define FLAGS
Definition: vf_psnr.c:62
compute_images_mse
static int compute_images_mse(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_psnr.c:121
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:80
ff_fill_rgba_map
int ff_fill_rgba_map(uint8_t *rgba_map, enum AVPixelFormat pix_fmt)
Definition: drawutils.c:35
ThreadData::planeheight
int planeheight[4]
Definition: vf_identity.c:89
d
d
Definition: ffmpeg_filter.c:424
AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL
#define AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL
Same as AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC, except that the filter will have its filter_frame() c...
Definition: avfilter.h:155
AV_PIX_FMT_YUV410P
@ AV_PIX_FMT_YUV410P
planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
Definition: pixfmt.h:79
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
PSNRContext::nb_frames
uint64_t nb_frames
Definition: vf_psnr.c:42
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:355
avstring.h
ff_framesync_dualinput_get
int ff_framesync_dualinput_get(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
Definition: framesync.c:393
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:229
AV_PIX_FMT_GRAY12
#define AV_PIX_FMT_GRAY12
Definition: pixfmt.h:464
drawutils.h
PSNRContext::stats_file
FILE * stats_file
Definition: vf_psnr.c:43
ff_filter_execute
static av_always_inline int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: internal.h:134
ThreadData::ref_data
const uint8_t * ref_data[4]
Definition: vf_identity.c:85
snprintf
#define snprintf
Definition: snprintf.h:34
OFFSET
#define OFFSET(x)
Definition: vf_psnr.c:61
PSNRContext::score
uint64_t ** score
Definition: vf_psnr.c:57
ff_vf_psnr
const AVFilter ff_vf_psnr
Definition: vf_psnr.c:464
ThreadData::main_data
const uint8_t * main_data[4]
Definition: vf_identity.c:84