vf_signalstats.c
/*
 * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
 * Copyright (c) 2014 Clément Bœsch
 * Copyright (c) 2014 Dave Rice @dericed
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "internal.h"

enum FilterMode {
    FILTER_NONE = -1,
    FILTER_TOUT,
    FILTER_VREP,
    FILTER_BRNG,
    FILT_NUMB
};

typedef struct SignalstatsContext {
    const AVClass *class;
    int chromah;    // height of chroma plane
    int chromaw;    // width of chroma plane
    int hsub;       // horizontal subsampling
    int vsub;       // vertical subsampling
    int depth;      // pixel depth
    int fs;         // pixel count per frame
    int cfs;        // pixel count per frame of chroma planes
    int outfilter;  // FilterMode
    int filters;
    AVFrame *frame_prev;
    uint8_t rgba_color[4];
    int yuv_color[3];
    int nb_jobs;
    int *jobs_rets;

    int maxsize;    // history stats array size
    int *histy, *histu, *histv, *histsat;

    AVFrame *frame_sat;
    AVFrame *frame_hue;
} SignalstatsContext;

typedef struct ThreadData {
    const AVFrame *in;
    AVFrame *out;
} ThreadData;

typedef struct ThreadDataHueSatMetrics {
    const AVFrame *src;
    AVFrame *dst_sat, *dst_hue;
} ThreadDataHueSatMetrics;

#define OFFSET(x) offsetof(SignalstatsContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption signalstats_options[] = {
    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "filters"},
        {"tout", "analyze pixels for temporal outliers",             0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, "filters"},
        {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, "filters"},
        {"brng", "analyze for pixels outside of broadcast range",    0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, "filters"},
    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, "out"},
        {"tout", "highlight pixels that depict temporal outliers",             0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, "out"},
        {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, "out"},
        {"brng", "highlight pixels that are outside of broadcast range",       0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, "out"},
    {"c",     "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {NULL}
};

AVFILTER_DEFINE_CLASS(signalstats);
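
/* Example invocations (the input file name is illustrative):
 *
 *   # print the per-frame lavfi.signalstats.* metadata computed below
 *   ffprobe -f lavfi movie=input.mkv,signalstats=stat=tout+vrep+brng -show_frames
 *
 *   # burn a highlight color into out-of-range pixels
 *   ffplay input.mkv -vf signalstats=out=brng:color=cyan
 */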

static av_cold int init(AVFilterContext *ctx)
{
    uint8_t r, g, b;
    SignalstatsContext *s = ctx->priv;

    if (s->outfilter != FILTER_NONE)
        s->filters |= 1 << s->outfilter;

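    /* Convert the RGBA highlight color to YCbCr once, using the common 8-bit
     * integer BT.601 coefficients; the (1<<7) term rounds before the >>8. */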
    r = s->rgba_color[0];
    g = s->rgba_color[1];
    b = s->rgba_color[2];
    s->yuv_color[0] = (( 66*r + 129*g +  25*b + (1<<7)) >> 8) +  16;
    s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
    s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    SignalstatsContext *s = ctx->priv;
    av_frame_free(&s->frame_prev);
    av_frame_free(&s->frame_sat);
    av_frame_free(&s->frame_hue);
    av_freep(&s->jobs_rets);
    av_freep(&s->histy);
    av_freep(&s->histu);
    av_freep(&s->histv);
    av_freep(&s->histsat);
}

// TODO: add more
static const enum AVPixelFormat pix_fmts[] = {
    AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P,
    AV_PIX_FMT_YUV440P,
    AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,
    AV_PIX_FMT_YUVJ411P,
    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV420P9,
    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,
    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,
    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV420P14,
    AV_PIX_FMT_YUV444P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,
    AV_PIX_FMT_NONE
};

static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
{
    AVFrame *frame = av_frame_alloc();
    if (!frame)
        return NULL;

    frame->format = pixfmt;
    frame->width  = w;
    frame->height = h;

    if (av_frame_get_buffer(frame, 0) < 0) {
        av_frame_free(&frame);
        return NULL;
    }

    return frame;
}

static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *inlink = outlink->src->inputs[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    s->hsub = desc->log2_chroma_w;
    s->vsub = desc->log2_chroma_h;
    s->depth = desc->comp[0].depth;
    s->maxsize = 1 << s->depth;
    s->histy = av_malloc_array(s->maxsize, sizeof(*s->histy));
    s->histu = av_malloc_array(s->maxsize, sizeof(*s->histu));
    s->histv = av_malloc_array(s->maxsize, sizeof(*s->histv));
    s->histsat = av_malloc_array(s->maxsize, sizeof(*s->histsat));

    if (!s->histy || !s->histu || !s->histv || !s->histsat)
        return AVERROR(ENOMEM);

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
    s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);

    s->fs = inlink->w * inlink->h;
    s->cfs = s->chromaw * s->chromah;

    s->nb_jobs = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
    s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
    if (!s->jobs_rets)
        return AVERROR(ENOMEM);

    s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
    s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
    if (!s->frame_sat || !s->frame_hue)
        return AVERROR(ENOMEM);

    return 0;
}

static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    f->data[0][y * f->linesize[0] + x] = s->yuv_color[0];
    f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
    f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
}

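/* Same as burn_frame8, but for formats deeper than 8 bits: the precomputed
 * 8-bit yuv_color values are scaled by 1 << (depth - 8) to the frame's depth. */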
static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    const int mult = 1 << (s->depth - 8);
    AV_WN16(f->data[0] + y       * f->linesize[0] + x       * 2, s->yuv_color[0] * mult);
    AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
    AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
}

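/* BRNG: count pixels outside of broadcast range -- luma outside [16, 235],
 * chroma outside [16, 240] on the 8-bit scale (scaled by 2^(depth-8) in the
 * 16-bit variant below) -- and optionally highlight them in the output. */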
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint8_t *pluma    = &in->data[0][y  * in->linesize[0]];
        const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
        const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 || luma    > 235 ||
                             chromau < 16 || chromau > 240 ||
                             chromav < 16 || chromav > 240;
            score += filt;
            if (out && filt)
                burn_frame8(s, out, x, y);
        }
    }
    return score;
}

static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int mult = 1 << (s->depth - 8);
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint16_t *pluma    = (uint16_t *)&in->data[0][y  * in->linesize[0]];
        const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
        const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 * mult || luma    > 235 * mult ||
                             chromau < 16 * mult || chromau > 240 * mult ||
                             chromav < 16 * mult || chromav > 240 * mult;
            score += filt;
            if (out && filt)
                burn_frame16(s, out, x, y);
        }
    }
    return score;
}

static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
{
    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
}
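
/* Worked example of the test above: with vertical neighbors x = z = 100 and
 * y = 110, (10 + 10) / 2 - 0 = 10 > 4, so y is flagged as an outlier; on a
 * smooth ramp x = 100, y = 110, z = 120, (10 + 10) / 2 - 20 = -10, so y
 * passes. A pixel only counts when it deviates from both neighbors in the
 * same direction. */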

static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    int lw = in->linesize[0];
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is in fact interlaced.

#define FILTER(i, j) \
    filter_tout_outlier(p[(y-j) * lw + x + i], \
                        p[ y    * lw + x + i], \
                        p[(y+j) * lw + x + i])

#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        }
    }
    return score;
}

static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    int lw = in->linesize[0] / 2;
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that video format is in fact interlaced.

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        }
    }
    return score;
}

#define VREP_START 4

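/* VREP: a line is flagged as a vertical repetition when the sum of absolute
 * differences against the line VREP_START rows above is below w, i.e. the
 * average per-pixel difference is less than 1. */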
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    const int lw = in->linesize[0];
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int filt, totdiff = 0;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame8(s, out, x, y);
    }
    return score * w;
}

static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    const int lw = in->linesize[0] / 2;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int64_t totdiff = 0;
        int filt;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame16(s, out, x, y);
    }
    return score * w;
}

static const struct {
    const char *name;
    int (*process8) (AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} filters_def[] = {
    {"TOUT", filter8_tout, filter16_tout},
    {"VREP", filter8_vrep, filter16_vrep},
    {"BRNG", filter8_brng, filter16_brng},
    {NULL}
};
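
/* The entry order must match the FilterMode enum (FILTER_TOUT, FILTER_VREP,
 * FILTER_BRNG): bit fil of s->filters selects filters_def[fil] in the
 * filter_frame loops below. */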
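/* Saturation is the chroma magnitude hypot(U - mid, V - mid); hue is the
 * chroma angle folded into [0, 360) degrees. Both are precomputed into
 * per-frame scratch planes so the histogram pass in filter_frame can reuse
 * them. */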
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1];
    const int lsz_v = src->linesize[2];
    const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
    const uint8_t *p_v = src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0];
    const int lsz_hue = dst_hue->linesize[0];
    uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
    uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypotf(yuvu - 128, yuvv - 128); // int or round?
            ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-128, yuvv-128) + 180.f), 360.f);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;
    const int mid = 1 << (s->depth - 1);

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1] / 2;
    const int lsz_v = src->linesize[2] / 2;
    const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
    const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0] / 2;
    const int lsz_hue = dst_hue->linesize[0] / 2;
    uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
    uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypotf(yuvu - mid, yuvv - mid); // int or round?
            ((int16_t*)p_hue)[i] = fmodf(floorf((180.f / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180.f), 360.f);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

static unsigned compute_bit_depth(uint16_t mask)
{
    return av_popcount(mask);
}
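
/* The mask is the OR of every sample value seen in a plane, so its popcount
 * is the effective bit depth: e.g. 8-bit content carried in a 10-bit frame
 * only produces values that are multiples of 4, the mask has at most 8 bits
 * set, and the reported bit depth is 8. */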

static int filter_frame8(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int  w = 0,  cw = 0, // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int *histy = s->histy,
                 *histu = s->histu,
                 *histv = s->histv,
                 histhue[360] = {0},
                 *histsat = s->histsat;
    int miny  = -1, minu  = -1, minv  = -1;
    int maxy  = -1, maxu  = -1, maxv  = -1;
    int lowy  = -1, lowu  = -1, lowv  = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int toty = 0, totu = 0, totv = 0, totsat = 0;
    int tothue = 0;
    int dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint8_t *p_sat = sat->data[0];
    const uint8_t *p_hue = hue->data[0];
    const int lsz_sat = sat->linesize[0];
    const int lsz_hue = hue->linesize[0];
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ff_filter_execute(ctx, compute_sat_hue_metrics8, &td_huesat,
                      NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = in->data[0][w + i];

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - prev->data[0][pw + i]);
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
    memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
    memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = in->data[1][cw+i];
            const int yuvv = in->data[2][cw+i];

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - prev->data[1][cpw+i]);
            histv[yuvv]++;
            difv += abs(yuvv - prev->data[2][cpw+i]);

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ff_filter_execute(ctx, filters_def[fil].process8,
                              &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < s->maxsize; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >= lowp)  lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >= highp)  highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

#define SET_META(key, fmt, val) do {                                   \
    snprintf(metabuf, sizeof(metabuf), fmt, val);                      \
    av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0); \
} while (0)
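
/* Every statistic is exported as per-frame metadata under the
 * "lavfi.signalstats." prefix and can be read back downstream, e.g.
 * (illustrative input name):
 *
 *   ffprobe -f lavfi movie=input.mkv,signalstats \
 *           -show_entries frame_tags=lavfi.signalstats.YAVG,lavfi.signalstats.YDIF
 */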

    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int filter_frame16(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int  w = 0,  cw = 0, // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int *histy = s->histy,
                 *histu = s->histu,
                 *histv = s->histv,
                 histhue[360] = {0},
                 *histsat = s->histsat;
    int miny  = -1, minu  = -1, minv  = -1;
    int maxy  = -1, maxu  = -1, maxv  = -1;
    int lowy  = -1, lowu  = -1, lowv  = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int64_t toty = 0, totu = 0, totv = 0, totsat = 0;
    int64_t tothue = 0;
    int64_t dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint16_t *p_sat = (uint16_t *)sat->data[0];
    const uint16_t *p_hue = (uint16_t *)hue->data[0];
    const int lsz_sat = sat->linesize[0] / 2;
    const int lsz_hue = hue->linesize[0] / 2;
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ff_filter_execute(ctx, compute_sat_hue_metrics16, &td_huesat,
                      NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = AV_RN16(in->data[0] + w + i * 2);

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - (int)AV_RN16(prev->data[0] + pw + i * 2));
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    memset(s->histu, 0, s->maxsize * sizeof(*s->histu));
    memset(s->histv, 0, s->maxsize * sizeof(*s->histv));
    memset(s->histsat, 0, s->maxsize * sizeof(*s->histsat));
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
            const int yuvv = AV_RN16(in->data[2] + cw + i * 2);

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - (int)AV_RN16(prev->data[1] + cpw + i * 2));
            histv[yuvv]++;
            difv += abs(yuvv - (int)AV_RN16(prev->data[2] + cpw + i * 2));

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ff_filter_execute(ctx, filters_def[fil].process16,
                              &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < s->maxsize; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >= lowp)  lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >= highp)  highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;

    if (s->depth > 8)
        return filter_frame16(link, in);
    else
        return filter_frame8(link, in);
}

static const AVFilterPad signalstats_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
};

static const AVFilterPad signalstats_outputs[] = {
    {
        .name         = "default",
        .config_props = config_output,
        .type         = AVMEDIA_TYPE_VIDEO,
    },
};

const AVFilter ff_vf_signalstats = {
    .name        = "signalstats",
    .description = "Generate statistics from video analysis.",
    .init        = init,
    .uninit      = uninit,
    .priv_size   = sizeof(SignalstatsContext),
    FILTER_INPUTS(signalstats_inputs),
    FILTER_OUTPUTS(signalstats_outputs),
    FILTER_PIXFMTS_ARRAY(pix_fmts),
    .priv_class  = &signalstats_class,
    .flags       = AVFILTER_FLAG_SLICE_THREADS,
};