FFmpeg
vf_signalstats.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
3  * Copyright (c) 2014 Clément Bœsch
4  * Copyright (c) 2014 Dave Rice @dericed
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include "libavutil/intreadwrite.h"
24 #include "libavutil/mem.h"
25 #include "libavutil/opt.h"
26 #include "libavutil/pixdesc.h"
27 #include "filters.h"
28 #include "internal.h"
29 
30 enum FilterMode {
36 };
37 
38 typedef struct SignalstatsContext {
39  const AVClass *class;
40  int chromah; // height of chroma plane
41  int chromaw; // width of chroma plane
42  int hsub; // horizontal subsampling
43  int vsub; // vertical subsampling
44  int depth; // pixel depth
45  int fs; // pixel count per frame
46  int cfs; // pixel count per frame of chroma planes
47  int outfilter; // FilterMode
48  int filters;
50  uint8_t rgba_color[4];
51  int yuv_color[3];
52  int nb_jobs;
53  int *jobs_rets;
54 
55  int maxsize; // history stats array size
56  int *histy, *histu, *histv, *histsat;
57 
61 
62 typedef struct ThreadData { // per-slice payload handed to the filter jobs
63  const AVFrame *in; // frame being analyzed (read-only)
64  AVFrame *out; // frame to paint highlights into, or NULL when no visual output is requested
65 } ThreadData;
66 
67 typedef struct ThreadDataHueSatMetrics {
68  const AVFrame *src;
71 
// Shorthands for the AVOption table below.
72 #define OFFSET(x) offsetof(SignalstatsContext, x)
73 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
74 
// "stat" is a bitmask selecting which analyses run; "out" selects at most one
// analysis whose flagged pixels are drawn into the output frame.  The same
// names (tout/vrep/brng) appear twice on purpose: once as bit values for the
// "filters" unit and once as plain enum values for the "out" unit.
75 static const AVOption signalstats_options[] = {
76  {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, .unit = "filters"},
77  {"tout", "analyze pixels for temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, .unit = "filters"},
78  {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, .unit = "filters"},
79  {"brng", "analyze for pixels outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, .unit = "filters"},
80  {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, .unit = "out"},
81  {"tout", "highlight pixels that depict temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, .unit = "out"},
82  {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, .unit = "out"},
83  {"brng", "highlight pixels that are outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, .unit = "out"},
84  {"c", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
85  {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
86  {NULL}
87 };
88 
89 AVFILTER_DEFINE_CLASS(signalstats);
90 
92 {
93  uint8_t r, g, b;
94  SignalstatsContext *s = ctx->priv;
95 
96  if (s->outfilter != FILTER_NONE)
97  s->filters |= 1 << s->outfilter; // the analysis chosen for visual output must also be computed
98 
 // Convert the configured RGBA highlight color to YUV once at init
 // (integer BT.601-style RGB->YUV approximation, limited range).
99  r = s->rgba_color[0];
100  g = s->rgba_color[1];
101  b = s->rgba_color[2];
102  s->yuv_color[0] = (( 66*r + 129*g + 25*b + (1<<7)) >> 8) + 16;
103  s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
104  s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
105  return 0;
106 }
107 
109 {
110  SignalstatsContext *s = ctx->priv;
 // Release the cached previous frame, the sat/hue scratch frames, the
 // per-job return array and the four value histograms.
111  av_frame_free(&s->frame_prev);
112  av_frame_free(&s->frame_sat);
113  av_frame_free(&s->frame_hue);
114  av_freep(&s->jobs_rets);
115  av_freep(&s->histy);
116  av_freep(&s->histu);
117  av_freep(&s->histv);
118  av_freep(&s->histsat);
119 }
120 
121 // TODO: add more
122 static const enum AVPixelFormat pix_fmts[] = {
135 };
136 
/* Allocate an AVFrame of the given pixel format and size together with its
 * data buffers; returns NULL on any allocation failure.
 * NOTE(review): this extracted copy appears to have lost orig line 139
 * (the av_frame_alloc() call declaring 'frame') and orig line 148 (freeing
 * 'frame' when buffer allocation fails) — confirm against upstream. */
137 static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
138 {
140  if (!frame)
141  return NULL;
142 
143  frame->format = pixfmt;
144  frame->width = w;
145  frame->height = h;
146 
147  if (av_frame_get_buffer(frame, 0) < 0) {
149  return NULL;
150  }
151 
152  return frame;
153 }
154 
/* Configure the output link: cache subsampling/bit depth, allocate the
 * sample-value histograms and the per-frame saturation/hue scratch frames. */
155 static int config_output(AVFilterLink *outlink)
156 {
157  AVFilterContext *ctx = outlink->src;
158  SignalstatsContext *s = ctx->priv;
159  AVFilterLink *inlink = outlink->src->inputs[0];
 // NOTE(review): the pixel-format descriptor lookup declaring 'desc'
 // (orig line 160) was lost in extraction.
161  s->hsub = desc->log2_chroma_w;
162  s->vsub = desc->log2_chroma_h;
163  s->depth = desc->comp[0].depth;
164  s->maxsize = 1 << s->depth; // one histogram bin per representable sample value
165  s->histy = av_malloc_array(s->maxsize, sizeof(*s->histy));
166  s->histu = av_malloc_array(s->maxsize, sizeof(*s->histu));
167  s->histv = av_malloc_array(s->maxsize, sizeof(*s->histv));
168  s->histsat = av_malloc_array(s->maxsize, sizeof(*s->histsat));
169 
170  if (!s->histy || !s->histu || !s->histv || !s->histsat)
171  return AVERROR(ENOMEM);
172 
173  outlink->w = inlink->w;
174  outlink->h = inlink->h;
175 
 // Chroma plane dimensions (rounded up for odd luma sizes).
176  s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
177  s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);
178 
 // Pixel counts used to normalize the per-frame statistics.
179  s->fs = inlink->w * inlink->h;
180  s->cfs = s->chromaw * s->chromah;
181 
 // One slice job per worker thread, but never more jobs than picture rows.
182  s->nb_jobs = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
183  s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
184  if (!s->jobs_rets)
185  return AVERROR(ENOMEM);
186 
 // Scratch planes: saturation fits 8 bits only for 8-bit input; hue is
 // always stored as 16-bit integer degrees.
187  s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
188  s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
189  if (!s->frame_sat || !s->frame_hue)
190  return AVERROR(ENOMEM);
191 
192  return 0;
193 }
194 
195 static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
196 {
197  const int chromax = x >> s->hsub;
198  const int chromay = y >> s->vsub;
199  f->data[0][y * f->linesize[0] + x] = s->yuv_color[0];
200  f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
201  f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
202 }
203 
204 static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
205 {
206  const int chromax = x >> s->hsub;
207  const int chromay = y >> s->vsub;
208  const int mult = 1 << (s->depth - 8);
209  AV_WN16(f->data[0] + y * f->linesize[0] + x * 2, s->yuv_color[0] * mult);
210  AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
211  AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
212 }
213 
214 static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
215 {
216  ThreadData *td = arg;
217  const SignalstatsContext *s = ctx->priv;
218  const AVFrame *in = td->in;
219  AVFrame *out = td->out;
220  const int w = in->width;
221  const int h = in->height;
222  const int slice_start = (h * jobnr ) / nb_jobs;
223  const int slice_end = (h * (jobnr+1)) / nb_jobs;
224  int x, y, score = 0;
225 
226  for (y = slice_start; y < slice_end; y++) {
227  const int yc = y >> s->vsub;
228  const uint8_t *pluma = &in->data[0][y * in->linesize[0]];
229  const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
230  const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];
231 
232  for (x = 0; x < w; x++) {
233  const int xc = x >> s->hsub;
234  const int luma = pluma[x];
235  const int chromau = pchromau[xc];
236  const int chromav = pchromav[xc];
237  const int filt = luma < 16 || luma > 235 ||
238  chromau < 16 || chromau > 240 ||
239  chromav < 16 || chromav > 240;
240  score += filt;
241  if (out && filt)
242  burn_frame8(s, out, x, y);
243  }
244  }
245  return score;
246 }
247 
248 static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
249 {
250  ThreadData *td = arg;
251  const SignalstatsContext *s = ctx->priv;
252  const AVFrame *in = td->in;
253  AVFrame *out = td->out;
254  const int mult = 1 << (s->depth - 8);
255  const int w = in->width;
256  const int h = in->height;
257  const int slice_start = (h * jobnr ) / nb_jobs;
258  const int slice_end = (h * (jobnr+1)) / nb_jobs;
259  int x, y, score = 0;
260 
261  for (y = slice_start; y < slice_end; y++) {
262  const int yc = y >> s->vsub;
263  const uint16_t *pluma = (uint16_t *)&in->data[0][y * in->linesize[0]];
264  const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
265  const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];
266 
267  for (x = 0; x < w; x++) {
268  const int xc = x >> s->hsub;
269  const int luma = pluma[x];
270  const int chromau = pchromau[xc];
271  const int chromav = pchromav[xc];
272  const int filt = luma < 16 * mult || luma > 235 * mult ||
273  chromau < 16 * mult || chromau > 240 * mult ||
274  chromav < 16 * mult || chromav > 240 * mult;
275  score += filt;
276  if (out && filt)
277  burn_frame16(s, out, x, y);
278  }
279  }
280  return score;
281 }
282 
/* Decide whether the middle sample y is a temporal outlier relative to its
 * two vertical neighbours x and z: the mean |neighbour - y| distance must
 * exceed the neighbour-to-neighbour distance |z - x| by more than 4.
 *
 * Parameters are plain ints rather than uint8_t: the FILTER/FILTER3 macros
 * feed this function 16-bit samples from filter16_tout(), which a uint8_t
 * parameter would silently truncate to the low 8 bits.  8-bit callers
 * (filter8_tout) behave exactly as before. */
static int filter_tout_outlier(int x, int y, int z)
{
    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
}
287 
/* Temporal-outlier pass, 8-bit: flag pixels that stand out against the rows
 * above/below (checked at +-1 and, away from the borders, also +-2 row
 * spacing; per the comment below, the 2-row spacing exists to tolerate
 * interlaced content).  A pixel is flagged only if its whole 3-wide
 * horizontal neighbourhood agrees (FILTER3).  Returns the slice's flagged
 * pixel count; flagged pixels are painted into td->out when set.
 * NOTE: the FILTER/FILTER3 macros defined here capture the locals p, lw,
 * x, y and are textually reused by filter16_tout() below. */
288 static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
289 {
290  ThreadData *td = arg;
291  const SignalstatsContext *s = ctx->priv;
292  const AVFrame *in = td->in;
293  AVFrame *out = td->out;
294  const int w = in->width;
295  const int h = in->height;
296  const int slice_start = (h * jobnr ) / nb_jobs;
297  const int slice_end = (h * (jobnr+1)) / nb_jobs;
298  const uint8_t *p = in->data[0];
299  int lw = in->linesize[0];
300  int x, y, score = 0, filt;
301 
302  for (y = slice_start; y < slice_end; y++) {
303 
 // First and last rows have no vertical neighbours to compare against.
304  if (y - 1 < 0 || y + 1 >= h)
305  continue;
306 
307  // detect two pixels above and below (to eliminate interlace artefacts)
308  // should check that video format is infact interlaced.
309 
310 #define FILTER(i, j) \
311  filter_tout_outlier(p[(y-j) * lw + x + i], \
312  p[ y * lw + x + i], \
313  p[(y+j) * lw + x + i])
314 
315 #define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))
316 
 // Interior rows: require outlier agreement at both 2-row and 1-row spacing.
317  if (y - 2 >= 0 && y + 2 < h) {
318  for (x = 1; x < w - 1; x++) {
319  filt = FILTER3(2) && FILTER3(1);
320  score += filt;
321  if (filt && out)
322  burn_frame8(s, out, x, y);
323  }
 // Rows 1 and h-2: only the 1-row spacing is available.
324  } else {
325  for (x = 1; x < w - 1; x++) {
326  filt = FILTER3(1);
327  score += filt;
328  if (filt && out)
329  burn_frame8(s, out, x, y);
330  }
331  }
332  }
333  return score;
334 }
335 
/* Temporal-outlier pass for depths above 8 bits; mirrors filter8_tout() and
 * reuses the FILTER3 macro defined there (which captures the locals p, lw,
 * x, y — the names must match exactly).
 * NOTE(review): FILTER3 reads 16-bit samples here — confirm that
 * filter_tout_outlier() accepts the full sample range without truncation. */
336 static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
337 {
338  ThreadData *td = arg;
339  const SignalstatsContext *s = ctx->priv;
340  const AVFrame *in = td->in;
341  AVFrame *out = td->out;
342  const int w = in->width;
343  const int h = in->height;
344  const int slice_start = (h * jobnr ) / nb_jobs;
345  const int slice_end = (h * (jobnr+1)) / nb_jobs;
346  const uint16_t *p = (uint16_t *)in->data[0];
347  int lw = in->linesize[0] / 2; // stride in samples, not bytes
348  int x, y, score = 0, filt;
349 
350  for (y = slice_start; y < slice_end; y++) {
351 
 // First and last rows have no vertical neighbours to compare against.
352  if (y - 1 < 0 || y + 1 >= h)
353  continue;
354 
355  // detect two pixels above and below (to eliminate interlace artefacts)
356  // should check that video format is infact interlaced.
357 
 // Interior rows: require outlier agreement at both 2-row and 1-row spacing.
358  if (y - 2 >= 0 && y + 2 < h) {
359  for (x = 1; x < w - 1; x++) {
360  filt = FILTER3(2) && FILTER3(1);
361  score += filt;
362  if (filt && out)
363  burn_frame16(s, out, x, y);
364  }
 // Rows 1 and h-2: only the 1-row spacing is available.
365  } else {
366  for (x = 1; x < w - 1; x++) {
367  filt = FILTER3(1);
368  score += filt;
369  if (filt && out)
370  burn_frame16(s, out, x, y);
371  }
372  }
373  }
374  return score;
375 }
376 
377 #define VREP_START 4
378 
379 static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
380 {
381  ThreadData *td = arg;
382  const SignalstatsContext *s = ctx->priv;
383  const AVFrame *in = td->in;
384  AVFrame *out = td->out;
385  const int w = in->width;
386  const int h = in->height;
387  const int slice_start = (h * jobnr ) / nb_jobs;
388  const int slice_end = (h * (jobnr+1)) / nb_jobs;
389  const uint8_t *p = in->data[0];
390  const int lw = in->linesize[0];
391  int x, y, score = 0;
392 
393  for (y = slice_start; y < slice_end; y++) {
394  const int y2lw = (y - VREP_START) * lw;
395  const int ylw = y * lw;
396  int filt, totdiff = 0;
397 
398  if (y < VREP_START)
399  continue;
400 
401  for (x = 0; x < w; x++)
402  totdiff += abs(p[y2lw + x] - p[ylw + x]);
403  filt = totdiff < w;
404 
405  score += filt;
406  if (filt && out)
407  for (x = 0; x < w; x++)
408  burn_frame8(s, out, x, y);
409  }
410  return score * w;
411 }
412 
413 static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
414 {
415  ThreadData *td = arg;
416  const SignalstatsContext *s = ctx->priv;
417  const AVFrame *in = td->in;
418  AVFrame *out = td->out;
419  const int w = in->width;
420  const int h = in->height;
421  const int slice_start = (h * jobnr ) / nb_jobs;
422  const int slice_end = (h * (jobnr+1)) / nb_jobs;
423  const uint16_t *p = (uint16_t *)in->data[0];
424  const int lw = in->linesize[0] / 2;
425  int x, y, score = 0;
426 
427  for (y = slice_start; y < slice_end; y++) {
428  const int y2lw = (y - VREP_START) * lw;
429  const int ylw = y * lw;
430  int64_t totdiff = 0;
431  int filt;
432 
433  if (y < VREP_START)
434  continue;
435 
436  for (x = 0; x < w; x++)
437  totdiff += abs(p[y2lw + x] - p[ylw + x]);
438  filt = totdiff < w;
439 
440  score += filt;
441  if (filt && out)
442  for (x = 0; x < w; x++)
443  burn_frame16(s, out, x, y);
444  }
445  return score * w;
446 }
447 
// Dispatch table for the optional analyses, iterated with an index over
// FILT_NUMB in filter_frame(); 'name' doubles as the metadata key suffix
// (lavfi.signalstats.<name>).  One callback per bit-depth family.
448 static const struct {
449  const char *name;
450  int (*process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
451  int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
452 } filters_def[] = {
453  {"TOUT", filter8_tout, filter16_tout},
454  {"VREP", filter8_vrep, filter16_vrep},
455  {"BRNG", filter8_brng, filter16_brng},
456  {NULL}
457 };
458 
/* Per-slice computation of the saturation and hue scratch planes, 8-bit
 * input.  Saturation is the distance of (U,V) from the neutral chroma point
 * (128,128); hue is the (U,V) angle converted to integer degrees in [0,360)
 * and stored as int16.
 * NOTE(review): the declaration of 'td' from 'arg' (orig line 462,
 * presumably 'ThreadDataHueSatMetrics *td = arg;') was lost in extraction. */
459 static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
460 {
461  int i, j;
463  const SignalstatsContext *s = ctx->priv;
464  const AVFrame *src = td->src;
465  AVFrame *dst_sat = td->dst_sat;
466  AVFrame *dst_hue = td->dst_hue;
467 
468  const int slice_start = (s->chromah * jobnr ) / nb_jobs;
469  const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;
470 
471  const int lsz_u = src->linesize[1];
472  const int lsz_v = src->linesize[2];
473  const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
474  const uint8_t *p_v = src->data[2] + slice_start * lsz_v;
475 
476  const int lsz_sat = dst_sat->linesize[0];
477  const int lsz_hue = dst_hue->linesize[0];
478  uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
479  uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;
480 
481  for (j = slice_start; j < slice_end; j++) {
482  for (i = 0; i < s->chromaw; i++) {
483  const int yuvu = p_u[i];
484  const int yuvv = p_v[i];
485  p_sat[i] = hypotf(yuvu - 128, yuvv - 128); // int or round?
486  ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-128, yuvv-128) + 180.f), 360.f);
487  }
488  p_u += lsz_u;
489  p_v += lsz_v;
490  p_sat += lsz_sat;
491  p_hue += lsz_hue;
492  }
493 
494  return 0;
495 }
496 
/* Per-slice saturation/hue computation for depths above 8 bits; mirrors the
 * 8-bit variant but measures chroma distance from the depth-dependent
 * midpoint 'mid' and walks 16-bit sample strides.
 * NOTE(review): the declaration of 'td' from 'arg' (orig line 500) was
 * lost in extraction. */
497 static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
498 {
499  int i, j;
501  const SignalstatsContext *s = ctx->priv;
502  const AVFrame *src = td->src;
503  AVFrame *dst_sat = td->dst_sat;
504  AVFrame *dst_hue = td->dst_hue;
505  const int mid = 1 << (s->depth - 1); // neutral chroma value at this depth
506 
507  const int slice_start = (s->chromah * jobnr ) / nb_jobs;
508  const int slice_end = (s->chromah * (jobnr+1)) / nb_jobs;
509 
510  const int lsz_u = src->linesize[1] / 2;
511  const int lsz_v = src->linesize[2] / 2;
512  const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
513  const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;
514 
515  const int lsz_sat = dst_sat->linesize[0] / 2;
516  const int lsz_hue = dst_hue->linesize[0] / 2;
517  uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
518  uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;
519 
520  for (j = slice_start; j < slice_end; j++) {
521  for (i = 0; i < s->chromaw; i++) {
522  const int yuvu = p_u[i];
523  const int yuvv = p_v[i];
524  p_sat[i] = hypotf(yuvu - mid, yuvv - mid); // int or round?
525  ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180.f), 360.f);
526  }
527  p_u += lsz_u;
528  p_v += lsz_v;
529  p_sat += lsz_sat;
530  p_hue += lsz_hue;
531  }
532 
533  return 0;
534 }
535 
/* Number of bits actually exercised by a plane's samples: the population
 * count of the OR-accumulated sample mask (Kernighan's bit-clearing loop,
 * equivalent to av_popcount for 16-bit input). */
static unsigned compute_bit_depth(uint16_t mask)
{
    unsigned bits = 0;

    while (mask) {
        mask &= mask - 1; /* clear the lowest set bit */
        bits++;
    }
    return bits;
}
540 
542 {
543  AVFilterContext *ctx = link->dst;
544  SignalstatsContext *s = ctx->priv;
545  AVFilterLink *outlink = ctx->outputs[0];
546  AVFrame *out = in;
547  int w = 0, cw = 0, // in
548  pw = 0, cpw = 0; // prev
549  int fil;
550  char metabuf[128];
551  unsigned int *histy = s->histy,
552  *histu = s->histu,
553  *histv = s->histv,
554  histhue[360] = {0},
555  *histsat = s->histsat;
556  int miny = -1, minu = -1, minv = -1;
557  int maxy = -1, maxu = -1, maxv = -1;
558  int lowy = -1, lowu = -1, lowv = -1;
559  int highy = -1, highu = -1, highv = -1;
560  int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
561  int lowp, highp, clowp, chighp;
562  int accy, accu, accv;
563  int accsat, acchue = 0;
564  int medhue, maxhue;
565  int64_t toty = 0, totu = 0, totv = 0, totsat=0;
566  int64_t tothue = 0;
567  int64_t dify = 0, difu = 0, difv = 0;
568  uint16_t masky = 0, masku = 0, maskv = 0;
569 
570  int filtot[FILT_NUMB] = {0};
571  AVFrame *prev;
572  int ret;
573  AVFrame *sat = s->frame_sat;
574  AVFrame *hue = s->frame_hue;
575  const int hbd = s->depth > 8;
576  ThreadDataHueSatMetrics td_huesat = {
577  .src = in,
578  .dst_sat = sat,
579  .dst_hue = hue,
580  };
581 
582  if (!s->frame_prev)
583  s->frame_prev = av_frame_clone(in);
584 
585  prev = s->frame_prev;
586 
587  if (s->outfilter != FILTER_NONE) {
588  out = av_frame_clone(in);
589  if (!out) {
590  av_frame_free(&in);
591  return AVERROR(ENOMEM);
592  }
594  if (ret < 0) {
595  av_frame_free(&out);
596  av_frame_free(&in);
597  return ret;
598  }
599  }
600 
602  : compute_sat_hue_metrics8, &td_huesat,
603  NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
604 
605  memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
606  memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
607  memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
608  memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
609 
610  if (hbd) {
611  const uint16_t *p_sat = (uint16_t *)sat->data[0];
612  const uint16_t *p_hue = (uint16_t *)hue->data[0];
613  const int lsz_sat = sat->linesize[0] / 2;
614  const int lsz_hue = hue->linesize[0] / 2;
615  // Calculate luma histogram and difference with previous frame or field.
616  for (int j = 0; j < link->h; j++) {
617  for (int i = 0; i < link->w; i++) {
618  const int yuv = AV_RN16(in->data[0] + w + i * 2);
619 
620  masky |= yuv;
621  histy[yuv]++;
622  dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
623  }
624  w += in->linesize[0];
625  pw += prev->linesize[0];
626  }
627 
628  // Calculate chroma histogram and difference with previous frame or field.
629  for (int j = 0; j < s->chromah; j++) {
630  for (int i = 0; i < s->chromaw; i++) {
631  const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
632  const int yuvv = AV_RN16(in->data[2] + cw + i * 2);
633 
634  masku |= yuvu;
635  maskv |= yuvv;
636  histu[yuvu]++;
637  difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
638  histv[yuvv]++;
639  difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));
640 
641  histsat[p_sat[i]]++;
642  histhue[((int16_t*)p_hue)[i]]++;
643  }
644  cw += in->linesize[1];
645  cpw += prev->linesize[1];
646  p_sat += lsz_sat;
647  p_hue += lsz_hue;
648  }
649  } else {
650  const uint8_t *p_sat = sat->data[0];
651  const uint8_t *p_hue = hue->data[0];
652  const int lsz_sat = sat->linesize[0];
653  const int lsz_hue = hue->linesize[0];
654  // Calculate luma histogram and difference with previous frame or field.
655  for (int j = 0; j < link->h; j++) {
656  for (int i = 0; i < link->w; i++) {
657  const int yuv = in->data[0][w + i];
658 
659  masky |= yuv;
660  histy[yuv]++;
661  dify += abs(yuv - prev->data[0][pw + i]);
662  }
663  w += in->linesize[0];
664  pw += prev->linesize[0];
665  }
666 
667  // Calculate chroma histogram and difference with previous frame or field.
668  for (int j = 0; j < s->chromah; j++) {
669  for (int i = 0; i < s->chromaw; i++) {
670  const int yuvu = in->data[1][cw+i];
671  const int yuvv = in->data[2][cw+i];
672 
673  masku |= yuvu;
674  maskv |= yuvv;
675  histu[yuvu]++;
676  difu += abs(yuvu - prev->data[1][cpw+i]);
677  histv[yuvv]++;
678  difv += abs(yuvv - prev->data[2][cpw+i]);
679 
680  histsat[p_sat[i]]++;
681  histhue[((int16_t*)p_hue)[i]]++;
682  }
683  cw += in->linesize[1];
684  cpw += prev->linesize[1];
685  p_sat += lsz_sat;
686  p_hue += lsz_hue;
687  }
688  }
689 
690  for (fil = 0; fil < FILT_NUMB; fil ++) {
691  if (s->filters & 1<<fil) {
692  ThreadData td = {
693  .in = in,
694  .out = out != in && s->outfilter == fil ? out : NULL,
695  };
696  memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
698  &td, s->jobs_rets, s->nb_jobs);
699  for (int i = 0; i < s->nb_jobs; i++)
700  filtot[fil] += s->jobs_rets[i];
701  }
702  }
703 
704  // find low / high based on histogram percentile
705  // these only need to be calculated once.
706 
707  lowp = lrint(s->fs * 10 / 100.);
708  highp = lrint(s->fs * 90 / 100.);
709  clowp = lrint(s->cfs * 10 / 100.);
710  chighp = lrint(s->cfs * 90 / 100.);
711 
712  accy = accu = accv = accsat = 0;
713  for (fil = 0; fil < s->maxsize; fil++) {
714  if (miny < 0 && histy[fil]) miny = fil;
715  if (minu < 0 && histu[fil]) minu = fil;
716  if (minv < 0 && histv[fil]) minv = fil;
717  if (minsat < 0 && histsat[fil]) minsat = fil;
718 
719  if (histy[fil]) maxy = fil;
720  if (histu[fil]) maxu = fil;
721  if (histv[fil]) maxv = fil;
722  if (histsat[fil]) maxsat = fil;
723 
724  toty += (uint64_t)histy[fil] * fil;
725  totu += (uint64_t)histu[fil] * fil;
726  totv += (uint64_t)histv[fil] * fil;
727  totsat += (uint64_t)histsat[fil] * fil;
728 
729  accy += histy[fil];
730  accu += histu[fil];
731  accv += histv[fil];
732  accsat += histsat[fil];
733 
734  if (lowy == -1 && accy >= lowp) lowy = fil;
735  if (lowu == -1 && accu >= clowp) lowu = fil;
736  if (lowv == -1 && accv >= clowp) lowv = fil;
737  if (lowsat == -1 && accsat >= clowp) lowsat = fil;
738 
739  if (highy == -1 && accy >= highp) highy = fil;
740  if (highu == -1 && accu >= chighp) highu = fil;
741  if (highv == -1 && accv >= chighp) highv = fil;
742  if (highsat == -1 && accsat >= chighp) highsat = fil;
743  }
744 
745  maxhue = histhue[0];
746  medhue = -1;
747  for (fil = 0; fil < 360; fil++) {
748  tothue += (uint64_t)histhue[fil] * fil;
749  acchue += histhue[fil];
750 
751  if (medhue == -1 && acchue > s->cfs / 2)
752  medhue = fil;
753  if (histhue[fil] > maxhue) {
754  maxhue = histhue[fil];
755  }
756  }
757 
758  av_frame_free(&s->frame_prev);
759  s->frame_prev = av_frame_clone(in);
760 
761 #define SET_META(key, fmt, val) do { \
762  snprintf(metabuf, sizeof(metabuf), fmt, val); \
763  av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
764 } while (0)
765 
766  av_dict_set_int(&out->metadata, "lavfi.signalstats.YMIN", miny, 0);
767  av_dict_set_int(&out->metadata, "lavfi.signalstats.YLOW", lowy, 0);
768  SET_META("YAVG", "%g", 1.0 * toty / s->fs);
769  av_dict_set_int(&out->metadata, "lavfi.signalstats.YHIGH", highy, 0);
770  av_dict_set_int(&out->metadata, "lavfi.signalstats.YMAX", maxy, 0);
771 
772  av_dict_set_int(&out->metadata, "lavfi.signalstats.UMIN", minu, 0);
773  av_dict_set_int(&out->metadata, "lavfi.signalstats.ULOW", lowu, 0);
774  SET_META("UAVG", "%g", 1.0 * totu / s->cfs);
775  av_dict_set_int(&out->metadata, "lavfi.signalstats.UHIGH", highu, 0);
776  av_dict_set_int(&out->metadata, "lavfi.signalstats.UMAX", maxu, 0);
777 
778  av_dict_set_int(&out->metadata, "lavfi.signalstats.VMIN", minv, 0);
779  av_dict_set_int(&out->metadata, "lavfi.signalstats.VLOW", lowv, 0);
780  SET_META("VAVG", "%g", 1.0 * totv / s->cfs);
781  av_dict_set_int(&out->metadata, "lavfi.signalstats.VHIGH", highv, 0);
782  av_dict_set_int(&out->metadata, "lavfi.signalstats.VMAX", maxv, 0);
783 
784  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATMIN", minsat, 0);
785  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATLOW", lowsat, 0);
786  SET_META("SATAVG", "%g", 1.0 * totsat / s->cfs);
787  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATHIGH", highsat, 0);
788  av_dict_set_int(&out->metadata, "lavfi.signalstats.SATMAX", maxsat, 0);
789 
790  av_dict_set_int(&out->metadata, "lavfi.signalstats.HUEMED", medhue, 0);
791  SET_META("HUEAVG", "%g", 1.0 * tothue / s->cfs);
792 
793  SET_META("YDIF", "%g", 1.0 * dify / s->fs);
794  SET_META("UDIF", "%g", 1.0 * difu / s->cfs);
795  SET_META("VDIF", "%g", 1.0 * difv / s->cfs);
796 
797  av_dict_set_int(&out->metadata, "lavfi.signalstats.YBITDEPTH", compute_bit_depth(masky), 0);
798  av_dict_set_int(&out->metadata, "lavfi.signalstats.UBITDEPTH", compute_bit_depth(masku), 0);
799  av_dict_set_int(&out->metadata, "lavfi.signalstats.VBITDEPTH", compute_bit_depth(maskv), 0);
800 
801  for (fil = 0; fil < FILT_NUMB; fil ++) {
802  if (s->filters & 1<<fil) {
803  char metaname[128];
804  snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
805  snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
806  av_dict_set(&out->metadata, metaname, metabuf, 0);
807  }
808  }
809 
810  if (in != out)
811  av_frame_free(&in);
812  return ff_filter_frame(outlink, out);
813 }
814 
// Single video input; all per-frame analysis happens in filter_frame().
815 static const AVFilterPad signalstats_inputs[] = {
816  {
817  .name = "default",
818  .type = AVMEDIA_TYPE_VIDEO,
819  .filter_frame = filter_frame,
820  },
821 };
822 
 // Output pad entry; config_output() sizes the histograms and scratch frames.
824  {
825  .name = "default",
826  .config_props = config_output,
827  .type = AVMEDIA_TYPE_VIDEO,
828  },
829 };
830 
832  .name = "signalstats",
833  .description = "Generate statistics from video analysis.",
834  .init = init,
835  .uninit = uninit,
836  .priv_size = sizeof(SignalstatsContext),
840  .priv_class = &signalstats_class,
842 };
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
r
const char * r
Definition: vf_curves.c:127
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
compute_sat_hue_metrics16
static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:497
SignalstatsContext::vsub
int vsub
Definition: vf_signalstats.c:43
out
FILE * out
Definition: movenc.c:55
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:288
SignalstatsContext::rgba_color
uint8_t rgba_color[4]
Definition: vf_signalstats.c:50
SignalstatsContext::chromah
int chromah
Definition: vf_signalstats.c:40
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1015
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_signalstats.c:91
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2965
SET_META
#define SET_META(key, fmt, val)
FILTER_PIXFMTS_ARRAY
#define FILTER_PIXFMTS_ARRAY(array)
Definition: internal.h:162
floorf
static __device__ float floorf(float a)
Definition: cuda_runtime.h:172
filter8_brng
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:214
atan2f
#define atan2f(y, x)
Definition: libm.h:45
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
AV_RN16
#define AV_RN16(p)
Definition: intreadwrite.h:354
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:160
SignalstatsContext::fs
int fs
Definition: vf_signalstats.c:45
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:374
pixdesc.h
AVFrame::width
int width
Definition: frame.h:446
SignalstatsContext::nb_jobs
int nb_jobs
Definition: vf_signalstats.c:52
w
uint8_t w
Definition: llviddspenc.c:38
AVOption
AVOption.
Definition: opt.h:357
b
#define b
Definition: input.c:41
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:478
VREP_START
#define VREP_START
Definition: vf_signalstats.c:377
SignalstatsContext::histv
int * histv
Definition: vf_signalstats.c:56
ThreadData::in
const AVFrame * in
Definition: vf_signalstats.c:63
AV_PIX_FMT_YUV440P
@ AV_PIX_FMT_YUV440P
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:106
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(signalstats)
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:170
av_popcount
#define av_popcount
Definition: common.h:154
ThreadData::out
AVFrame * out
Definition: af_adeclick.c:527
ThreadData::in
AVFrame * in
Definition: af_adecorrelate.c:154
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:395
FLAGS
#define FLAGS
Definition: vf_signalstats.c:73
FilterMode
FilterMode
Definition: vp9.h:64
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict, int *got_output)
Handle slice ends.
Definition: mpeg12dec.c:1721
AV_PIX_FMT_YUV422P9
#define AV_PIX_FMT_YUV422P9
Definition: pixfmt.h:476
SignalstatsContext::histu
int * histu
Definition: vf_signalstats.c:56
filter16_tout
static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:336
AV_PIX_FMT_GRAY16
#define AV_PIX_FMT_GRAY16
Definition: pixfmt.h:462
filter_tout_outlier
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
Definition: vf_signalstats.c:283
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: vf_signalstats.c:122
FILT_NUMB
@ FILT_NUMB
Definition: vf_signalstats.c:35
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:33
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:148
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:481
AV_PIX_FMT_YUVJ411P
@ AV_PIX_FMT_YUVJ411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG), deprecated in favor ...
Definition: pixfmt.h:283
SignalstatsContext
Definition: vf_signalstats.c:38
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_signalstats.c:108
mult
static int16_t mult(Float11 *f1, Float11 *f2)
Definition: g726.c:60
lrint
#define lrint
Definition: tablegen.h:53
FILTER_VREP
@ FILTER_VREP
Definition: vf_signalstats.c:33
av_cold
#define av_cold
Definition: attributes.h:90
AV_PIX_FMT_YUV422P16
#define AV_PIX_FMT_YUV422P16
Definition: pixfmt.h:490
mask
static const uint16_t mask[17]
Definition: lzw.c:38
AV_PIX_FMT_YUVJ422P
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
Definition: pixfmt.h:86
alloc_frame
static AVFrame * alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
Definition: vf_signalstats.c:137
SignalstatsContext::yuv_color
int yuv_color[3]
Definition: vf_signalstats.c:51
OFFSET
#define OFFSET(x)
Definition: vf_signalstats.c:72
SignalstatsContext::histsat
int * histsat
Definition: vf_signalstats.c:56
intreadwrite.h
s
#define s(width, name)
Definition: cbs_vp9.c:198
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:491
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:60
g
const char * g
Definition: vf_curves.c:128
filters
#define filters(fmt, type, inverse, clp, inverset, clip, one, clip_fn, packed)
Definition: af_crystalizer.c:54
filters.h
AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P9
Definition: pixfmt.h:475
AV_PIX_FMT_YUV420P16
#define AV_PIX_FMT_YUV420P16
Definition: pixfmt.h:489
ctx
AVFormatContext * ctx
Definition: movenc.c:49
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:593
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: internal.h:182
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
AV_PIX_FMT_YUVJ444P
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
Definition: pixfmt.h:87
ff_inlink_make_frame_writable
int ff_inlink_make_frame_writable(AVFilterLink *link, AVFrame **rframe)
Make sure a frame is writable.
Definition: avfilter.c:1489
arg
const char * arg
Definition: jacosubdec.c:67
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
FILTER_BRNG
@ FILTER_BRNG
Definition: vf_signalstats.c:34
burn_frame16
static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:204
filters_def
static const struct @334 filters_def[]
filter8_tout
static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:288
ThreadDataHueSatMetrics::dst_hue
AVFrame * dst_hue
Definition: vf_signalstats.c:69
SignalstatsContext::outfilter
int outfilter
Definition: vf_signalstats.c:47
AV_PIX_FMT_YUVJ420P
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
Definition: pixfmt.h:85
AV_OPT_TYPE_COLOR
@ AV_OPT_TYPE_COLOR
Definition: opt.h:260
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:415
AV_PIX_FMT_YUV440P10
#define AV_PIX_FMT_YUV440P10
Definition: pixfmt.h:480
abs
#define abs(x)
Definition: cuda_runtime.h:35
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:479
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
ff_vf_signalstats
const AVFilter ff_vf_signalstats
Definition: vf_signalstats.c:831
SignalstatsContext::hsub
int hsub
Definition: vf_signalstats.c:42
name
const char * name
Definition: vf_signalstats.c:449
signalstats_inputs
static const AVFilterPad signalstats_inputs[]
Definition: vf_signalstats.c:815
burn_frame8
static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
Definition: vf_signalstats.c:195
f
f
Definition: af_crystalizer.c:121
filter_frame
static int filter_frame(AVFilterLink *link, AVFrame *in)
Definition: vf_signalstats.c:541
SignalstatsContext::histy
int * histy
Definition: vf_signalstats.c:56
for
for(k=2;k<=8;++k)
Definition: h264pred_template.c:425
AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV422P12
Definition: pixfmt.h:483
FILTER3
#define FILTER3(j)
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:485
signalstats_outputs
static const AVFilterPad signalstats_outputs[]
Definition: vf_signalstats.c:823
ThreadDataHueSatMetrics::dst_sat
AVFrame * dst_sat
Definition: vf_signalstats.c:69
ThreadDataHueSatMetrics
Definition: vf_signalstats.c:67
M_PI
#define M_PI
Definition: mathematics.h:67
internal.h
compute_sat_hue_metrics8
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:459
SignalstatsContext::filters
int filters
Definition: vf_signalstats.c:48
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
process8
int(* process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:450
FILTER_TOUT
@ FILTER_TOUT
Definition: vf_signalstats.c:32
SignalstatsContext::frame_prev
AVFrame * frame_prev
Definition: vf_signalstats.c:49
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:827
SignalstatsContext::maxsize
int maxsize
Definition: vf_signalstats.c:55
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_signalstats.c:155
ThreadData
Used for passing data between threads.
Definition: dsddec.c:71
process16
int(* process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:451
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_PIX_FMT_YUVJ440P
@ AV_PIX_FMT_YUVJ440P
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P and setting color_range
Definition: pixfmt.h:107
SignalstatsContext::depth
int depth
Definition: vf_signalstats.c:44
filt
static const int8_t filt[NUMTAPS *2]
Definition: af_earwax.c:39
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:39
SignalstatsContext::chromaw
int chromaw
Definition: vf_signalstats.c:41
AV_PIX_FMT_YUV444P9
#define AV_PIX_FMT_YUV444P9
Definition: pixfmt.h:477
AVFilter
Filter definition.
Definition: avfilter.h:166
ret
ret
Definition: filter_design.txt:187
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:367
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
SignalstatsContext::cfs
int cfs
Definition: vf_signalstats.c:46
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:482
filter16_brng
static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:248
AV_PIX_FMT_YUV422P14
#define AV_PIX_FMT_YUV422P14
Definition: pixfmt.h:487
AVFrame::height
int height
Definition: frame.h:446
SignalstatsContext::frame_sat
AVFrame * frame_sat
Definition: vf_signalstats.c:58
FILTER_NONE
@ FILTER_NONE
Definition: vf_signalstats.c:31
signalstats_options
static const AVOption signalstats_options[]
Definition: vf_signalstats.c:75
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:245
filter8_vrep
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:379
slice_start
static int slice_start(SliceContext *sc, VVCContext *s, VVCFrameContext *fc, const CodedBitstreamUnit *unit, const int is_first_slice)
Definition: dec.c:700
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
AVFilterContext
An instance of a filter.
Definition: avfilter.h:407
av_dict_set_int
int av_dict_set_int(AVDictionary **pm, const char *key, int64_t value, int flags)
Convenience wrapper for av_dict_set() that converts the value to a string and stores it.
Definition: dict.c:167
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:117
desc
const char * desc
Definition: libsvtav1.c:79
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
mem.h
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
filter16_vrep
static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
Definition: vf_signalstats.c:413
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:183
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:88
src
INIT_CLIP pixel * src
Definition: h264pred_template.c:418
AV_PIX_FMT_YUV411P
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:80
AV_OPT_TYPE_FLAGS
@ AV_OPT_TYPE_FLAGS
Definition: opt.h:244
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
Definition: frame.h:419
SignalstatsContext::jobs_rets
int * jobs_rets
Definition: vf_signalstats.c:53
AV_PIX_FMT_YUV440P12
#define AV_PIX_FMT_YUV440P12
Definition: pixfmt.h:484
h
h
Definition: vp9dsp_template.c:2038
AV_PIX_FMT_YUV444P14
#define AV_PIX_FMT_YUV444P14
Definition: pixfmt.h:488
ff_filter_execute
static av_always_inline int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: internal.h:134
compute_bit_depth
static unsigned compute_bit_depth(uint16_t mask)
Definition: vf_signalstats.c:536
int
int
Definition: ffmpeg_filter.c:424
SignalstatsContext::frame_hue
AVFrame * frame_hue
Definition: vf_signalstats.c:59
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:254
snprintf
#define snprintf
Definition: snprintf.h:34
ThreadDataHueSatMetrics::src
const AVFrame * src
Definition: vf_signalstats.c:68
AV_PIX_FMT_YUV420P14
#define AV_PIX_FMT_YUV420P14
Definition: pixfmt.h:486
AV_WN16
#define AV_WN16(p, v)
Definition: intreadwrite.h:366