FFmpeg
vf_datascope.c
1 /*
2  * Copyright (c) 2016 Paul B Mahol
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include "libavutil/avassert.h"
22 #include "libavutil/intreadwrite.h"
23 #include "libavutil/opt.h"
24 #include "libavutil/parseutils.h"
25 #include "libavutil/pixdesc.h"
26 #include "libavutil/xga_font_data.h"
27 #include "avfilter.h"
28 #include "drawutils.h"
29 #include "formats.h"
30 #include "internal.h"
31 #include "video.h"
32 
33 typedef struct DatascopeContext {
34  const AVClass *class;
35  int ow, oh;
36  int x, y;
37  int mode;
38  int dformat;
39  int axis;
40  float opacity;
41 
42  int nb_planes;
43  int nb_comps;
44  int chars;
45  FFDrawContext draw;
46  FFDrawColor yellow;
47  FFDrawColor white;
48  FFDrawColor black;
49  FFDrawColor gray;
50 
51  void (*pick_color)(FFDrawContext *draw, FFDrawColor *color, AVFrame *in, int x, int y, int *value);
52  void (*reverse_color)(FFDrawContext *draw, FFDrawColor *color, FFDrawColor *reverse);
53  int (*filter)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
54 } DatascopeContext;
55 
56 #define OFFSET(x) offsetof(DatascopeContext, x)
57 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
58 #define FLAGSR AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
59 
60 static const AVOption datascope_options[] = {
61  { "size", "set output size", OFFSET(ow), AV_OPT_TYPE_IMAGE_SIZE, {.str="hd720"}, 0, 0, FLAGS },
62  { "s", "set output size", OFFSET(ow), AV_OPT_TYPE_IMAGE_SIZE, {.str="hd720"}, 0, 0, FLAGS },
63  { "x", "set x offset", OFFSET(x), AV_OPT_TYPE_INT, {.i64=0}, 0, INT_MAX, FLAGS },
64  { "y", "set y offset", OFFSET(y), AV_OPT_TYPE_INT, {.i64=0}, 0, INT_MAX, FLAGS },
65  { "mode", "set scope mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=0}, 0, 2, FLAGS, "mode" },
66  { "mono", NULL, 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, FLAGS, "mode" },
67  { "color", NULL, 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, FLAGS, "mode" },
68  { "color2", NULL, 0, AV_OPT_TYPE_CONST, {.i64=2}, 0, 0, FLAGS, "mode" },
69  { "axis", "draw column/row numbers", OFFSET(axis), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
70  { "opacity", "set background opacity", OFFSET(opacity), AV_OPT_TYPE_FLOAT, {.dbl=0.75}, 0, 1, FLAGS },
71  { "format", "set display number format", OFFSET(dformat), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, FLAGS, "format" },
72  { "hex", NULL, 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, FLAGS, "format" },
73  { "dec", NULL, 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, FLAGS, "format" },
74  { NULL }
75 };
76 
77 AVFILTER_DEFINE_CLASS(datascope);
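/*
 * A usage sketch (the option values are only an example, not from this file):
 * the options above are exposed through the usual ffmpeg filtergraph syntax,
 * so an invocation could look like
 *   datascope=s=hd720:x=0:y=0:mode=color2:format=hex:axis=1
 * with the surrounding command line left to the reader.
 */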
78 
79 static int query_formats(AVFilterContext *ctx)
80 {
81  return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
82 }
83 
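/*
 * Note: draw_text() below blits 8x8 glyphs from the CGA ROM font
 * (avpriv_cga_font) with ff_blend_mask(). A '\n' resets x to x0 and moves
 * down one row; in vertical mode every character is stacked downward
 * instead of advancing x.
 */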
84 static void draw_text(FFDrawContext *draw, AVFrame *frame, FFDrawColor *color,
85  int x0, int y0, const uint8_t *text, int vertical)
86 {
87  int x = x0;
88 
89  for (; *text; text++) {
90  if (*text == '\n') {
91  x = x0;
92  y0 += 8;
93  continue;
94  }
95  ff_blend_mask(draw, color, frame->data, frame->linesize,
96  frame->width, frame->height,
97  avpriv_cga_font + *text * 8, 1, 8, 8, 0, 0, x, y0);
98  if (vertical) {
99  x = x0;
100  y0 += 8;
101  } else {
102  x += 8;
103  }
104  }
105 }
106 
107 static void pick_color8(FFDrawContext *draw, FFDrawColor *color, AVFrame *in, int x, int y, int *value)
108 {
109  int p, i;
110 
111  color->rgba[3] = 255;
112  for (p = 0; p < draw->nb_planes; p++) {
113  if (draw->nb_planes == 1) {
114  for (i = 0; i < 4; i++) {
115  value[i] = in->data[0][y * in->linesize[0] + x * draw->pixelstep[0] + i];
116  color->comp[0].u8[i] = value[i];
117  }
118  } else {
119  value[p] = in->data[p][(y >> draw->vsub[p]) * in->linesize[p] + (x >> draw->hsub[p])];
120  color->comp[p].u8[0] = value[p];
121  }
122  }
123 }
124 
125 static void pick_color16(FFDrawContext *draw, FFDrawColor *color, AVFrame *in, int x, int y, int *value)
126 {
127  int p, i;
128 
129  color->rgba[3] = 255;
130  for (p = 0; p < draw->nb_planes; p++) {
131  if (draw->nb_planes == 1) {
132  for (i = 0; i < 4; i++) {
133  value[i] = AV_RL16(in->data[0] + y * in->linesize[0] + x * draw->pixelstep[0] + i * 2);
134  color->comp[0].u16[i] = value[i];
135  }
136  } else {
137  value[p] = AV_RL16(in->data[p] + (y >> draw->vsub[p]) * in->linesize[p] + (x >> draw->hsub[p]) * 2);
138  color->comp[p].u16[0] = value[p];
139  }
140  }
141 }
142 
143 static void reverse_color8(FFDrawContext *draw, FFDrawColor *color, FFDrawColor *reverse)
144 {
145  int p;
146 
147  reverse->rgba[3] = 255;
148  for (p = 0; p < draw->nb_planes; p++) {
149  reverse->comp[p].u8[0] = color->comp[p].u8[0] > 127 ? 0 : 255;
150  reverse->comp[p].u8[1] = color->comp[p].u8[1] > 127 ? 0 : 255;
151  reverse->comp[p].u8[2] = color->comp[p].u8[2] > 127 ? 0 : 255;
152  }
153 }
154 
155 static void reverse_color16(FFDrawContext *draw, FFDrawColor *color, FFDrawColor *reverse)
156 {
157  int p;
158 
159  reverse->rgba[3] = 255;
160  for (p = 0; p < draw->nb_planes; p++) {
161  const unsigned max = (1 << draw->desc->comp[p].depth) - 1;
162  const unsigned mid = (max + 1) / 2;
163 
164  reverse->comp[p].u16[0] = color->comp[p].u16[0] > mid ? 0 : max;
165  reverse->comp[p].u16[1] = color->comp[p].u16[1] > mid ? 0 : max;
166  reverse->comp[p].u16[2] = color->comp[p].u16[2] > mid ? 0 : max;
167  }
168 }
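/*
 * Note: reverse_color8()/reverse_color16() above derive a contrasting color:
 * each component flips to 0 when the picked sample is above the mid-point and
 * to the component maximum otherwise. filter_color2() uses it so the printed
 * values stay legible on top of cells filled with the sampled color.
 */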
169 
170 typedef struct ThreadData {
171  AVFrame *in, *out;
172  int xoff, yoff;
173 } ThreadData;
174 
175 static int filter_color2(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
176 {
177  DatascopeContext *s = ctx->priv;
178  AVFilterLink *outlink = ctx->outputs[0];
179  AVFilterLink *inlink = ctx->inputs[0];
180  ThreadData *td = arg;
181  AVFrame *in = td->in;
182  AVFrame *out = td->out;
183  const int xoff = td->xoff;
184  const int yoff = td->yoff;
185  const int P = FFMAX(s->nb_planes, s->nb_comps);
186  const int C = s->chars;
187  const int D = ((s->chars - s->dformat) >> 2) + s->dformat * 2;
188  const int W = (outlink->w - xoff) / (C * 10);
189  const int H = (outlink->h - yoff) / (P * 12);
190  const char *format[4] = {"%02X\n", "%04X\n", "%03d\n", "%05d\n"};
191  const int slice_start = (W * jobnr) / nb_jobs;
192  const int slice_end = (W * (jobnr+1)) / nb_jobs;
193  int x, y, p;
194 
195  for (y = 0; y < H && (y + s->y < inlink->h); y++) {
196  for (x = slice_start; x < slice_end && (x + s->x < inlink->w); x++) {
197  FFDrawColor color = { { 0 } };
198  FFDrawColor reverse = { { 0 } };
199  int value[4] = { 0 };
200 
201  s->pick_color(&s->draw, &color, in, x + s->x, y + s->y, value);
202  s->reverse_color(&s->draw, &color, &reverse);
203  ff_fill_rectangle(&s->draw, &color, out->data, out->linesize,
204  xoff + x * C * 10, yoff + y * P * 12, C * 10, P * 12);
205 
206  for (p = 0; p < P; p++) {
207  char text[256];
208 
209  snprintf(text, sizeof(text), format[D], value[p]);
210  draw_text(&s->draw, out, &reverse, xoff + x * C * 10 + 2, yoff + y * P * 12 + p * 10 + 2, text, 0);
211  }
212  }
213  }
214 
215  return 0;
216 }
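/*
 * Worked example for the format selection above: C is the number of
 * characters per printed value (2 or 4 for hex, 3 or 5 for decimal, as set
 * in config_input() below) and D picks the matching format string:
 *   hex, <= 8 bit: C = 2, D = ((2-0)>>2) + 0 = 0 -> "%02X"
 *   hex,  > 8 bit: C = 4, D = ((4-0)>>2) + 0 = 1 -> "%04X"
 *   dec, <= 8 bit: C = 3, D = ((3-1)>>2) + 2 = 2 -> "%03d"
 *   dec,  > 8 bit: C = 5, D = ((5-1)>>2) + 2 = 3 -> "%05d"
 * The same computation appears in filter_color() and filter_mono() below.
 */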
217 
218 static int filter_color(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
219 {
220  DatascopeContext *s = ctx->priv;
221  AVFilterLink *outlink = ctx->outputs[0];
222  AVFilterLink *inlink = ctx->inputs[0];
223  ThreadData *td = arg;
224  AVFrame *in = td->in;
225  AVFrame *out = td->out;
226  const int xoff = td->xoff;
227  const int yoff = td->yoff;
228  const int P = FFMAX(s->nb_planes, s->nb_comps);
229  const int C = s->chars;
230  const int D = ((s->chars - s->dformat) >> 2) + s->dformat * 2;
231  const int W = (outlink->w - xoff) / (C * 10);
232  const int H = (outlink->h - yoff) / (P * 12);
233  const char *format[4] = {"%02X\n", "%04X\n", "%03d\n", "%05d\n"};
234  const int slice_start = (W * jobnr) / nb_jobs;
235  const int slice_end = (W * (jobnr+1)) / nb_jobs;
236  int x, y, p;
237 
238  for (y = 0; y < H && (y + s->y < inlink->h); y++) {
239  for (x = slice_start; x < slice_end && (x + s->x < inlink->w); x++) {
240  FFDrawColor color = { { 0 } };
241  int value[4] = { 0 };
242 
243  s->pick_color(&s->draw, &color, in, x + s->x, y + s->y, value);
244 
245  for (p = 0; p < P; p++) {
246  char text[256];
247 
248  snprintf(text, sizeof(text), format[D], value[p]);
249  draw_text(&s->draw, out, &color, xoff + x * C * 10 + 2, yoff + y * P * 12 + p * 10 + 2, text, 0);
250  }
251  }
252  }
253 
254  return 0;
255 }
256 
257 static int filter_mono(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
258 {
259  DatascopeContext *s = ctx->priv;
260  AVFilterLink *outlink = ctx->outputs[0];
261  AVFilterLink *inlink = ctx->inputs[0];
262  ThreadData *td = arg;
263  AVFrame *in = td->in;
264  AVFrame *out = td->out;
265  const int xoff = td->xoff;
266  const int yoff = td->yoff;
267  const int P = FFMAX(s->nb_planes, s->nb_comps);
268  const int C = s->chars;
269  const int D = ((s->chars - s->dformat) >> 2) + s->dformat * 2;
270  const int W = (outlink->w - xoff) / (C * 10);
271  const int H = (outlink->h - yoff) / (P * 12);
272  const char *format[4] = {"%02X\n", "%04X\n", "%03d\n", "%05d\n"};
273  const int slice_start = (W * jobnr) / nb_jobs;
274  const int slice_end = (W * (jobnr+1)) / nb_jobs;
275  int x, y, p;
276 
277  for (y = 0; y < H && (y + s->y < inlink->h); y++) {
278  for (x = slice_start; x < slice_end && (x + s->x < inlink->w); x++) {
279  FFDrawColor color = { { 0 } };
280  int value[4] = { 0 };
281 
282  s->pick_color(&s->draw, &color, in, x + s->x, y + s->y, value);
283  for (p = 0; p < P; p++) {
284  char text[256];
285 
286  snprintf(text, sizeof(text), format[D], value[p]);
287  draw_text(&s->draw, out, &s->white, xoff + x * C * 10 + 2, yoff + y * P * 12 + p * 10 + 2, text, 0);
288  }
289  }
290  }
291 
292  return 0;
293 }
294 
295 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
296 {
297  AVFilterContext *ctx = inlink->dst;
298  DatascopeContext *s = ctx->priv;
299  AVFilterLink *outlink = ctx->outputs[0];
300  ThreadData td = { 0 };
301  int ymaxlen = 0;
302  int xmaxlen = 0;
303  AVFrame *out;
304 
305  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
306  if (!out) {
307  av_frame_free(&in);
308  return AVERROR(ENOMEM);
309  }
310  out->pts = in->pts;
311 
312  ff_fill_rectangle(&s->draw, &s->black, out->data, out->linesize,
313  0, 0, outlink->w, outlink->h);
314 
315  if (s->axis) {
316  const int P = FFMAX(s->nb_planes, s->nb_comps);
317  const int C = s->chars;
318  int Y = outlink->h / (P * 12);
319  int X = outlink->w / (C * 10);
320  char text[256] = { 0 };
321  int x, y;
322 
323  snprintf(text, sizeof(text), "%d", s->y + Y);
324  ymaxlen = strlen(text);
325  ymaxlen *= 10;
326  snprintf(text, sizeof(text), "%d", s->x + X);
327  xmaxlen = strlen(text);
328  xmaxlen *= 10;
329 
330  Y = (outlink->h - xmaxlen) / (P * 12);
331  X = (outlink->w - ymaxlen) / (C * 10);
332 
333  for (y = 0; y < Y; y++) {
334  snprintf(text, sizeof(text), "%d", s->y + y);
335 
336  ff_fill_rectangle(&s->draw, &s->gray, out->data, out->linesize,
337  0, xmaxlen + y * P * 12 + (P + 1) * P - 2, ymaxlen, 10);
338 
339  draw_text(&s->draw, out, &s->yellow, 2, xmaxlen + y * P * 12 + (P + 1) * P, text, 0);
340  }
341 
342  for (x = 0; x < X; x++) {
343  snprintf(text, sizeof(text), "%d", s->x + x);
344 
345  ff_fill_rectangle(&s->draw, &s->gray, out->data, out->linesize,
346  ymaxlen + x * C * 10 + 2 * C - 2, 0, 10, xmaxlen);
347 
348  draw_text(&s->draw, out, &s->yellow, ymaxlen + x * C * 10 + 2 * C, 2, text, 1);
349  }
350  }
351 
352  td.in = in; td.out = out, td.yoff = xmaxlen, td.xoff = ymaxlen;
353  ctx->internal->execute(ctx, s->filter, &td, NULL, FFMIN(ff_filter_get_nb_threads(ctx), FFMAX(outlink->w / 20, 1)));
354 
355  av_frame_free(&in);
356  return ff_filter_frame(outlink, out);
357 }
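/*
 * Note: when axis drawing is enabled, ymaxlen/xmaxlen are the pixel widths of
 * the widest row/column label (10 pixels per character cell), and they are
 * handed to the slice jobs as td.xoff/td.yoff so the value grid starts after
 * the label margins.
 */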
358 
359 static int config_input(AVFilterLink *inlink)
360 {
361  DatascopeContext *s = inlink->dst->priv;
362  uint8_t alpha = s->opacity * 255;
363 
364  s->nb_planes = av_pix_fmt_count_planes(inlink->format);
365  ff_draw_init(&s->draw, inlink->format, 0);
366  ff_draw_color(&s->draw, &s->white, (uint8_t[]){ 255, 255, 255, 255} );
367  ff_draw_color(&s->draw, &s->black, (uint8_t[]){ 0, 0, 0, alpha} );
368  ff_draw_color(&s->draw, &s->yellow, (uint8_t[]){ 255, 255, 0, 255} );
369  ff_draw_color(&s->draw, &s->gray, (uint8_t[]){ 77, 77, 77, 255} );
370  s->chars = (s->draw.desc->comp[0].depth + 7) / 8 * 2 + s->dformat;
371  s->nb_comps = s->draw.desc->nb_components;
372 
373  switch (s->mode) {
374  case 0: s->filter = filter_mono; break;
375  case 1: s->filter = filter_color; break;
376  case 2: s->filter = filter_color2; break;
377  }
378 
379  if (s->draw.desc->comp[0].depth <= 8) {
380  s->pick_color = pick_color8;
381  s->reverse_color = reverse_color8;
382  } else {
383  s->pick_color = pick_color16;
384  s->reverse_color = reverse_color16;
385  }
386 
387  return 0;
388 }
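/*
 * Note on the fields set above: chars = (depth + 7) / 8 * 2 + dformat is the
 * width of one printed value, e.g. 2 hex digits for 8-bit input and 4 hex
 * digits for 9..16-bit input, with one extra character in decimal mode.
 * P = FFMAX(nb_planes, nb_comps) in the filter functions above then gives
 * the number of values printed per pixel cell.
 */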
389 
390 static int config_output(AVFilterLink *outlink)
391 {
392  DatascopeContext *s = outlink->src->priv;
393 
394  outlink->h = s->oh;
395  outlink->w = s->ow;
396  outlink->sample_aspect_ratio = (AVRational){1,1};
397 
398  return 0;
399 }
400 
401 static const AVFilterPad inputs[] = {
402  {
403  .name = "default",
404  .type = AVMEDIA_TYPE_VIDEO,
405  .filter_frame = filter_frame,
406  .config_props = config_input,
407  },
408  { NULL }
409 };
410 
411 static const AVFilterPad outputs[] = {
412  {
413  .name = "default",
414  .type = AVMEDIA_TYPE_VIDEO,
415  .config_props = config_output,
416  },
417  { NULL }
418 };
419 
420 AVFilter ff_vf_datascope = {
421  .name = "datascope",
422  .description = NULL_IF_CONFIG_SMALL("Video data analysis."),
423  .priv_size = sizeof(DatascopeContext),
424  .priv_class = &datascope_class,
425  .query_formats = query_formats,
426  .inputs = inputs,
427  .outputs = outputs,
428  .flags = AVFILTER_FLAG_SLICE_THREADS,
429 };
430 
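/*
 * A minimal sketch (not part of this filter) of instantiating "datascope"
 * through the public libavfilter API; the graph, instance name and option
 * string are illustrative only, and the buffer/buffersink wiring is omitted:
 *
 *     const AVFilter *f = avfilter_get_by_name("datascope");
 *     AVFilterContext *fctx = NULL;
 *     int ret = avfilter_graph_create_filter(&fctx, f, "scope",
 *                                            "s=hd720:mode=color2:axis=1",
 *                                            NULL, graph);
 *     if (ret < 0)
 *         return ret;
 */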
431 typedef struct PixscopeContext {
432  const AVClass *class;
433 
434  float xpos, ypos;
435  float wx, wy;
436  int w, h;
437  float o;
438 
439  int x, y;
440  int ww, wh;
441 
442  int nb_planes;
443  int nb_comps;
444  int is_rgb;
445  uint8_t rgba_map[4];
446  FFDrawContext draw;
447  FFDrawColor dark;
448  FFDrawColor black;
449  FFDrawColor white;
450  FFDrawColor green;
451  FFDrawColor blue;
452  FFDrawColor red;
453  FFDrawColor *colors[4];
454 
455  void (*pick_color)(FFDrawContext *draw, FFDrawColor *color, AVFrame *in, int x, int y, int *value);
456 } PixscopeContext;
457 
458 #define POFFSET(x) offsetof(PixscopeContext, x)
459 
460 static const AVOption pixscope_options[] = {
461  { "x", "set scope x offset", POFFSET(xpos), AV_OPT_TYPE_FLOAT, {.dbl=0.5}, 0, 1, FLAGS },
462  { "y", "set scope y offset", POFFSET(ypos), AV_OPT_TYPE_FLOAT, {.dbl=0.5}, 0, 1, FLAGS },
463  { "w", "set scope width", POFFSET(w), AV_OPT_TYPE_INT, {.i64=7}, 1, 80, FLAGS },
464  { "h", "set scope height", POFFSET(h), AV_OPT_TYPE_INT, {.i64=7}, 1, 80, FLAGS },
465  { "o", "set window opacity", POFFSET(o), AV_OPT_TYPE_FLOAT, {.dbl=0.5}, 0, 1, FLAGS },
466  { "wx", "set window x offset", POFFSET(wx), AV_OPT_TYPE_FLOAT, {.dbl=-1}, -1, 1, FLAGS },
467  { "wy", "set window y offset", POFFSET(wy), AV_OPT_TYPE_FLOAT, {.dbl=-1}, -1, 1, FLAGS },
468  { NULL }
469 };
470 
471 AVFILTER_DEFINE_CLASS(pixscope);
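/*
 * A usage sketch (values are only an example):
 *   pixscope=x=0.3:y=0.3:w=7:h=7:o=0.5
 * magnifies a 7x7 block of pixels around the given relative position and
 * prints per-channel AVG/MIN/MAX/RMS statistics below it.
 */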
472 
473 static int pixscope_config_input(AVFilterLink *inlink)
474 {
475  PixscopeContext *s = inlink->dst->priv;
476 
477  s->nb_planes = av_pix_fmt_count_planes(inlink->format);
478  ff_draw_init(&s->draw, inlink->format, 0);
479  ff_draw_color(&s->draw, &s->dark, (uint8_t[]){ 0, 0, 0, s->o * 255} );
480  ff_draw_color(&s->draw, &s->black, (uint8_t[]){ 0, 0, 0, 255} );
481  ff_draw_color(&s->draw, &s->white, (uint8_t[]){ 255, 255, 255, 255} );
482  ff_draw_color(&s->draw, &s->green, (uint8_t[]){ 0, 255, 0, 255} );
483  ff_draw_color(&s->draw, &s->blue, (uint8_t[]){ 0, 0, 255, 255} );
484  ff_draw_color(&s->draw, &s->red, (uint8_t[]){ 255, 0, 0, 255} );
485  s->nb_comps = s->draw.desc->nb_components;
486  s->is_rgb = s->draw.desc->flags & AV_PIX_FMT_FLAG_RGB;
487 
488  if (s->is_rgb) {
489  s->colors[0] = &s->red;
490  s->colors[1] = &s->green;
491  s->colors[2] = &s->blue;
492  s->colors[3] = &s->white;
493  ff_fill_rgba_map(s->rgba_map, inlink->format);
494  } else {
495  s->colors[0] = &s->white;
496  s->colors[1] = &s->blue;
497  s->colors[2] = &s->red;
498  s->colors[3] = &s->white;
499  s->rgba_map[0] = 0;
500  s->rgba_map[1] = 1;
501  s->rgba_map[2] = 2;
502  s->rgba_map[3] = 3;
503  }
504 
505  if (s->draw.desc->comp[0].depth <= 8) {
506  s->pick_color = pick_color8;
507  } else {
508  s->pick_color = pick_color16;
509  }
510 
511  if (inlink->w < 640 || inlink->h < 480) {
512  av_log(inlink->dst, AV_LOG_ERROR, "min supported resolution is 640x480\n");
513  return AVERROR(EINVAL);
514  }
515 
516  s->ww = 300;
517  s->wh = 300 * 1.6;
518  s->x = s->xpos * (inlink->w - 1);
519  s->y = s->ypos * (inlink->h - 1);
520  if (s->x + s->w >= inlink->w || s->y + s->h >= inlink->h) {
521  av_log(inlink->dst, AV_LOG_WARNING, "scope position is out of range, clipping\n");
522  s->x = FFMIN(s->x, inlink->w - s->w);
523  s->y = FFMIN(s->y, inlink->h - s->h);
524  }
525 
526  return 0;
527 }
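/*
 * Note: the overlay window is fixed at 300x480 (wh = 300 * 1.6, the extra
 * height holding the statistics table), which is why inputs smaller than
 * 640x480 are rejected above.
 */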
528 
529 static int pixscope_filter_frame(AVFilterLink *inlink, AVFrame *in)
530 {
531  AVFilterContext *ctx = inlink->dst;
532  PixscopeContext *s = ctx->priv;
533  AVFilterLink *outlink = ctx->outputs[0];
534  AVFrame *out = ff_get_video_buffer(outlink, in->width, in->height);
535  int max[4] = { 0 }, min[4] = { INT_MAX, INT_MAX, INT_MAX, INT_MAX };
536  float average[4] = { 0 };
537  double rms[4] = { 0 };
538  const char rgba[4] = { 'R', 'G', 'B', 'A' };
539  const char yuva[4] = { 'Y', 'U', 'V', 'A' };
540  int x, y, X, Y, i, w, h;
541  char text[128];
542 
543  if (!out) {
544  av_frame_free(&in);
545  return AVERROR(ENOMEM);
546  }
547  av_frame_copy_props(out, in);
548  av_frame_copy(out, in);
549 
550  w = s->ww / s->w;
551  h = s->ww / s->h;
552 
553  if (s->wx >= 0) {
554  X = (in->width - s->ww) * s->wx;
555  } else {
556  X = (in->width - s->ww) * -s->wx;
557  }
558  if (s->wy >= 0) {
559  Y = (in->height - s->wh) * s->wy;
560  } else {
561  Y = (in->height - s->wh) * -s->wy;
562  }
563 
564  if (s->wx < 0) {
565  if (s->x + s->w >= X && (s->x + s->w <= X + s->ww) &&
566  s->y + s->h >= Y && (s->y + s->h <= Y + s->wh)) {
567  X = (in->width - s->ww) * (1 + s->wx);
568  }
569  }
570 
571  if (s->wy < 0) {
572  if (s->x + s->w >= X && (s->x + s->w <= X + s->ww) &&
573  s->y + s->h >= Y && (s->y + s->h <= Y + s->wh)) {
574  Y = (in->height - s->wh) * (1 + s->wy);
575  }
576  }
577 
578  ff_blend_rectangle(&s->draw, &s->dark, out->data, out->linesize,
579  out->width, out->height,
580  X,
581  Y,
582  s->ww,
583  s->wh);
584 
585  for (y = 0; y < s->h; y++) {
586  for (x = 0; x < s->w; x++) {
587  FFDrawColor color = { { 0 } };
588  int value[4] = { 0 };
589 
590  s->pick_color(&s->draw, &color, in, x + s->x, y + s->y, value);
591  ff_fill_rectangle(&s->draw, &color, out->data, out->linesize,
592  x * w + (s->ww - 4 - (s->w * w)) / 2 + X, y * h + 2 + Y, w, h);
593  for (i = 0; i < 4; i++) {
594  rms[i] += (double)value[i] * (double)value[i];
595  average[i] += value[i];
596  min[i] = FFMIN(min[i], value[i]);
597  max[i] = FFMAX(max[i], value[i]);
598  }
599  }
600  }
601 
602  ff_blend_rectangle(&s->draw, &s->black, out->data, out->linesize,
603  out->width, out->height,
604  s->x - 2, s->y - 2, s->w + 4, 1);
605 
606  ff_blend_rectangle(&s->draw, &s->white, out->data, out->linesize,
607  out->width, out->height,
608  s->x - 1, s->y - 1, s->w + 2, 1);
609 
610  ff_blend_rectangle(&s->draw, &s->white, out->data, out->linesize,
611  out->width, out->height,
612  s->x - 1, s->y - 1, 1, s->h + 2);
613 
614  ff_blend_rectangle(&s->draw, &s->black, out->data, out->linesize,
615  out->width, out->height,
616  s->x - 2, s->y - 2, 1, s->h + 4);
617 
618  ff_blend_rectangle(&s->draw, &s->white, out->data, out->linesize,
619  out->width, out->height,
620  s->x - 1, s->y + 1 + s->h, s->w + 3, 1);
621 
622  ff_blend_rectangle(&s->draw, &s->black, out->data, out->linesize,
623  out->width, out->height,
624  s->x - 2, s->y + 2 + s->h, s->w + 4, 1);
625 
626  ff_blend_rectangle(&s->draw, &s->white, out->data, out->linesize,
627  out->width, out->height,
628  s->x + 1 + s->w, s->y - 1, 1, s->h + 2);
629 
630  ff_blend_rectangle(&s->draw, &s->black, out->data, out->linesize,
631  out->width, out->height,
632  s->x + 2 + s->w, s->y - 2, 1, s->h + 5);
633 
634  for (i = 0; i < 4; i++) {
635  rms[i] /= s->w * s->h;
636  rms[i] = sqrt(rms[i]);
637  average[i] /= s->w * s->h;
638  }
639 
640  snprintf(text, sizeof(text), "CH AVG MIN MAX RMS\n");
641  draw_text(&s->draw, out, &s->white, X + 28, Y + s->ww + 20, text, 0);
642  for (i = 0; i < s->nb_comps; i++) {
643  int c = s->rgba_map[i];
644 
645  snprintf(text, sizeof(text), "%c %07.1f %05d %05d %07.1f\n", s->is_rgb ? rgba[i] : yuva[i], average[c], min[c], max[c], rms[c]);
646  draw_text(&s->draw, out, s->colors[i], X + 28, Y + s->ww + 20 * (i + 2), text, 0);
647  }
648 
649  av_frame_free(&in);
650  return ff_filter_frame(outlink, out);
651 }
652 
653 static const AVFilterPad pixscope_inputs[] = {
654  {
655  .name = "default",
656  .type = AVMEDIA_TYPE_VIDEO,
657  .filter_frame = pixscope_filter_frame,
658  .config_props = pixscope_config_input,
659  },
660  { NULL }
661 };
662 
663 static const AVFilterPad pixscope_outputs[] = {
664  {
665  .name = "default",
666  .type = AVMEDIA_TYPE_VIDEO,
667  },
668  { NULL }
669 };
670 
671 AVFilter ff_vf_pixscope = {
672  .name = "pixscope",
673  .description = NULL_IF_CONFIG_SMALL("Pixel data analysis."),
674  .priv_size = sizeof(PixscopeContext),
675  .priv_class = &pixscope_class,
676  .query_formats = query_formats,
677  .inputs = pixscope_inputs,
678  .outputs = pixscope_outputs,
679  .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,
680 };
681 
682 typedef struct PixelValues {
683  uint16_t p[4];
684 } PixelValues;
685 
686 typedef struct OscilloscopeContext {
687  const AVClass *class;
688 
689  float xpos, ypos;
690  float tx, ty;
691  float size;
692  float tilt;
693  float theight, twidth;
694  float o;
695  int components;
696  int grid;
697  int statistics;
698  int scope;
699 
700  int x1, y1, x2, y2;
701  int ox, oy;
702  int height, width;
703 
704  int max;
705  int nb_planes;
706  int nb_comps;
707  int is_rgb;
708  uint8_t rgba_map[4];
709  FFDrawContext draw;
710  FFDrawColor dark;
711  FFDrawColor black;
712  FFDrawColor white;
713  FFDrawColor green;
714  FFDrawColor blue;
715  FFDrawColor red;
716  FFDrawColor cyan;
717  FFDrawColor magenta;
718  FFDrawColor gray;
719  FFDrawColor *colors[4];
720 
721  int nb_values;
722  PixelValues *values;
723 
724  void (*pick_color)(FFDrawContext *draw, FFDrawColor *color, AVFrame *in, int x, int y, int *value);
725  void (*draw_trace)(struct OscilloscopeContext *s, AVFrame *frame);
726 } OscilloscopeContext;
727 
728 #define OOFFSET(x) offsetof(OscilloscopeContext, x)
729 
730 static const AVOption oscilloscope_options[] = {
731  { "x", "set scope x position", OOFFSET(xpos), AV_OPT_TYPE_FLOAT, {.dbl=0.5}, 0, 1, FLAGSR },
732  { "y", "set scope y position", OOFFSET(ypos), AV_OPT_TYPE_FLOAT, {.dbl=0.5}, 0, 1, FLAGSR },
733  { "s", "set scope size", OOFFSET(size), AV_OPT_TYPE_FLOAT, {.dbl=0.8}, 0, 1, FLAGSR },
734  { "t", "set scope tilt", OOFFSET(tilt), AV_OPT_TYPE_FLOAT, {.dbl=0.5}, 0, 1, FLAGSR },
735  { "o", "set trace opacity", OOFFSET(o), AV_OPT_TYPE_FLOAT, {.dbl=0.8}, 0, 1, FLAGSR },
736  { "tx", "set trace x position", OOFFSET(tx), AV_OPT_TYPE_FLOAT, {.dbl=0.5}, 0, 1, FLAGSR },
737  { "ty", "set trace y position", OOFFSET(ty), AV_OPT_TYPE_FLOAT, {.dbl=0.9}, 0, 1, FLAGSR },
738  { "tw", "set trace width", OOFFSET(twidth), AV_OPT_TYPE_FLOAT, {.dbl=0.8},.1, 1, FLAGSR },
739  { "th", "set trace height", OOFFSET(theight), AV_OPT_TYPE_FLOAT, {.dbl=0.3},.1, 1, FLAGSR },
740  { "c", "set components to trace", OOFFSET(components), AV_OPT_TYPE_INT, {.i64=7}, 0, 15, FLAGSR },
741  { "g", "draw trace grid", OOFFSET(grid), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGSR },
742  { "st", "draw statistics", OOFFSET(statistics), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGSR },
743  { "sc", "draw scope", OOFFSET(scope), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGSR },
744  { NULL }
745 };
746 
747 AVFILTER_DEFINE_CLASS(oscilloscope);
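/*
 * A usage sketch (values are only an example):
 *   oscilloscope=x=0.5:y=0.5:s=0.8:t=0.5:c=7
 * samples pixel values along a line through the frame and plots them as a
 * trace. These options use FLAGSR, i.e. they carry AV_OPT_FLAG_RUNTIME_PARAM,
 * so they can be changed while the filter runs (see
 * oscilloscope_process_command() below).
 */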
748 
749 static av_cold void oscilloscope_uninit(AVFilterContext *ctx)
750 {
751  OscilloscopeContext *s = ctx->priv;
752 
753  av_freep(&s->values);
754 }
755 
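/*
 * Note: draw_line() below (and draw_scope() further down) step along the
 * segment with Bresenham-style integer error accumulation, writing either
 * 8-bit or 16-bit samples depending on the pixel format.
 */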
756 static void draw_line(FFDrawContext *draw, int x0, int y0, int x1, int y1,
757  AVFrame *out, FFDrawColor *color)
758 {
759  int dx = FFABS(x1 - x0), sx = x0 < x1 ? 1 : -1;
760  int dy = FFABS(y1 - y0), sy = y0 < y1 ? 1 : -1;
761  int err = (dx > dy ? dx : -dy) / 2, e2;
762  int p, i;
763 
764  for (;;) {
765  if (x0 >= 0 && y0 >= 0 && x0 < out->width && y0 < out->height) {
766  for (p = 0; p < draw->nb_planes; p++) {
767  if (draw->desc->comp[p].depth == 8) {
768  if (draw->nb_planes == 1) {
769  for (i = 0; i < 4; i++) {
770  out->data[0][y0 * out->linesize[0] + x0 * draw->pixelstep[0] + i] = color->comp[0].u8[i];
771  }
772  } else {
773  out->data[p][out->linesize[p] * (y0 >> draw->vsub[p]) + (x0 >> draw->hsub[p])] = color->comp[p].u8[0];
774  }
775  } else {
776  if (draw->nb_planes == 1) {
777  for (i = 0; i < 4; i++) {
778  AV_WN16(out->data[0] + y0 * out->linesize[0] + 2 * (x0 * draw->pixelstep[0] + i), color->comp[0].u16[i]);
779  }
780  } else {
781  AV_WN16(out->data[p] + out->linesize[p] * (y0 >> draw->vsub[p]) + (x0 >> draw->hsub[p]) * 2, color->comp[p].u16[0]);
782  }
783  }
784  }
785  }
786 
787  if (x0 == x1 && y0 == y1)
788  break;
789 
790  e2 = err;
791 
792  if (e2 >-dx) {
793  err -= dy;
794  x0 += sx;
795  }
796 
797  if (e2 < dy) {
798  err += dx;
799  y0 += sy;
800  }
801  }
802 }
803 
804 static void draw_trace8(OscilloscopeContext *s, AVFrame *frame)
805 {
806  int i, c;
807 
808  for (i = 1; i < s->nb_values; i++) {
809  for (c = 0; c < s->nb_comps; c++) {
810  if ((1 << c) & s->components) {
811  int x = i * s->width / s->nb_values;
812  int px = (i - 1) * s->width / s->nb_values;
813  int py = s->height - s->values[i-1].p[s->rgba_map[c]] * s->height / 256;
814  int y = s->height - s->values[i].p[s->rgba_map[c]] * s->height / 256;
815 
816  draw_line(&s->draw, s->ox + x, s->oy + y, s->ox + px, s->oy + py, frame, s->colors[c]);
817  }
818  }
819  }
820 }
821 
822 
823 static void draw_trace16(OscilloscopeContext *s, AVFrame *frame)
824 {
825  int i, c;
826 
827  for (i = 1; i < s->nb_values; i++) {
828  for (c = 0; c < s->nb_comps; c++) {
829  if ((1 << c) & s->components) {
830  int x = i * s->width / s->nb_values;
831  int px = (i - 1) * s->width / s->nb_values;
832  int py = s->height - s->values[i-1].p[s->rgba_map[c]] * s->height / s->max;
833  int y = s->height - s->values[i].p[s->rgba_map[c]] * s->height / s->max;
834 
835  draw_line(&s->draw, s->ox + x, s->oy + y, s->ox + px, s->oy + py, frame, s->colors[c]);
836  }
837  }
838  }
839 }
840 
841 static void update_oscilloscope(AVFilterContext *ctx)
842 {
843  OscilloscopeContext *s = ctx->priv;
844  AVFilterLink *inlink = ctx->inputs[0];
845  int cx, cy, size;
846  double tilt;
847 
848  ff_draw_color(&s->draw, &s->dark, (uint8_t[]){ 0, 0, 0, s->o * 255} );
849  s->height = s->theight * inlink->h;
850  s->width = s->twidth * inlink->w;
851  size = hypot(inlink->w, inlink->h);
852  size *= s->size;
853  tilt = (s->tilt - 0.5) * M_PI;
854  cx = s->xpos * (inlink->w - 1);
855  cy = s->ypos * (inlink->h - 1);
856  s->x1 = cx - size / 2.0 * cos(tilt);
857  s->x2 = cx + size / 2.0 * cos(tilt);
858  s->y1 = cy - size / 2.0 * sin(tilt);
859  s->y2 = cy + size / 2.0 * sin(tilt);
860  s->ox = (inlink->w - s->width) * s->tx;
861  s->oy = (inlink->h - s->height) * s->ty;
862 }
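/*
 * Note on the geometry above: the sampling segment is centred at
 * (xpos * (w - 1), ypos * (h - 1)), its length is size * hypot(w, h), and
 * tilt in [0,1] maps to an angle of (tilt - 0.5) * pi, so tilt = 0.5 gives a
 * horizontal line. The trace box is twidth * w by theight * h pixels, placed
 * at the relative position (tx, ty).
 */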
863 
864 static int oscilloscope_config_input(AVFilterLink *inlink)
865 {
866  OscilloscopeContext *s = inlink->dst->priv;
867  int size;
868 
869  s->nb_planes = av_pix_fmt_count_planes(inlink->format);
870  ff_draw_init(&s->draw, inlink->format, 0);
871  ff_draw_color(&s->draw, &s->black, (uint8_t[]){ 0, 0, 0, 255} );
872  ff_draw_color(&s->draw, &s->white, (uint8_t[]){ 255, 255, 255, 255} );
873  ff_draw_color(&s->draw, &s->green, (uint8_t[]){ 0, 255, 0, 255} );
874  ff_draw_color(&s->draw, &s->blue, (uint8_t[]){ 0, 0, 255, 255} );
875  ff_draw_color(&s->draw, &s->red, (uint8_t[]){ 255, 0, 0, 255} );
876  ff_draw_color(&s->draw, &s->cyan, (uint8_t[]){ 0, 255, 255, 255} );
877  ff_draw_color(&s->draw, &s->magenta, (uint8_t[]){ 255, 0, 255, 255} );
878  ff_draw_color(&s->draw, &s->gray, (uint8_t[]){ 128, 128, 128, 255} );
879  s->nb_comps = s->draw.desc->nb_components;
880  s->is_rgb = s->draw.desc->flags & AV_PIX_FMT_FLAG_RGB;
881 
882  if (s->is_rgb) {
883  s->colors[0] = &s->red;
884  s->colors[1] = &s->green;
885  s->colors[2] = &s->blue;
886  s->colors[3] = &s->white;
887  ff_fill_rgba_map(s->rgba_map, inlink->format);
888  } else {
889  s->colors[0] = &s->white;
890  s->colors[1] = &s->cyan;
891  s->colors[2] = &s->magenta;
892  s->colors[3] = &s->white;
893  s->rgba_map[0] = 0;
894  s->rgba_map[1] = 1;
895  s->rgba_map[2] = 2;
896  s->rgba_map[3] = 3;
897  }
898 
899  if (s->draw.desc->comp[0].depth <= 8) {
900  s->pick_color = pick_color8;
901  s->draw_trace = draw_trace8;
902  } else {
903  s->pick_color = pick_color16;
904  s->draw_trace = draw_trace16;
905  }
906 
907  s->max = (1 << s->draw.desc->comp[0].depth);
908  size = hypot(inlink->w, inlink->h);
909 
910  s->values = av_calloc(size, sizeof(*s->values));
911  if (!s->values)
912  return AVERROR(ENOMEM);
913 
914  update_oscilloscope(inlink->dst);
915 
916  return 0;
917 }
918 
919 static void draw_scope(OscilloscopeContext *s, int x0, int y0, int x1, int y1,
920  AVFrame *out, PixelValues *p, int state)
921 {
922  int dx = FFABS(x1 - x0), sx = x0 < x1 ? 1 : -1;
923  int dy = FFABS(y1 - y0), sy = y0 < y1 ? 1 : -1;
924  int err = (dx > dy ? dx : -dy) / 2, e2;
925 
926  for (;;) {
927  if (x0 >= 0 && y0 >= 0 && x0 < out->width && y0 < out->height) {
928  FFDrawColor color = { { 0 } };
929  int value[4] = { 0 };
930 
931  s->pick_color(&s->draw, &color, out, x0, y0, value);
932  s->values[s->nb_values].p[0] = value[0];
933  s->values[s->nb_values].p[1] = value[1];
934  s->values[s->nb_values].p[2] = value[2];
935  s->values[s->nb_values].p[3] = value[3];
936  s->nb_values++;
937 
938  if (s->scope) {
939  if (s->draw.desc->comp[0].depth == 8) {
940  if (s->draw.nb_planes == 1) {
941  int i;
942 
943  for (i = 0; i < s->draw.pixelstep[0]; i++)
944  out->data[0][out->linesize[0] * y0 + x0 * s->draw.pixelstep[0] + i] = 255 * ((s->nb_values + state) & 1);
945  } else {
946  out->data[0][out->linesize[0] * y0 + x0] = 255 * ((s->nb_values + state) & 1);
947  }
948  } else {
949  if (s->draw.nb_planes == 1) {
950  int i;
951 
952  for (i = 0; i < s->draw.pixelstep[0]; i++)
953  AV_WN16(out->data[0] + out->linesize[0] * y0 + 2 * x0 * (s->draw.pixelstep[0] + i), (s->max - 1) * ((s->nb_values + state) & 1));
954  } else {
955  AV_WN16(out->data[0] + out->linesize[0] * y0 + 2 * x0, (s->max - 1) * ((s->nb_values + state) & 1));
956  }
957  }
958  }
959  }
960 
961  if (x0 == x1 && y0 == y1)
962  break;
963 
964  e2 = err;
965 
966  if (e2 >-dx) {
967  err -= dy;
968  x0 += sx;
969  }
970 
971  if (e2 < dy) {
972  err += dx;
973  y0 += sy;
974  }
975  }
976 }
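/*
 * Note: draw_scope() above walks the segment with the same stepping as
 * draw_line(), storing every sample in s->values (sized from hypot(w, h) in
 * oscilloscope_config_input()), and, when "sc" is set, overwrites the pixels
 * with a dashed marker whose phase flips with the frame counter so the scope
 * line stays visible on any content.
 */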
977 
978 static int oscilloscope_filter_frame(AVFilterLink *inlink, AVFrame *frame)
979 {
980  AVFilterContext *ctx = inlink->dst;
981  OscilloscopeContext *s = ctx->priv;
982  AVFilterLink *outlink = ctx->outputs[0];
983  float average[4] = { 0 };
984  int max[4] = { 0 };
985  int min[4] = { INT_MAX, INT_MAX, INT_MAX, INT_MAX };
986  int i, c;
987 
988  s->nb_values = 0;
989  draw_scope(s, s->x1, s->y1, s->x2, s->y2, frame, s->values, inlink->frame_count_in & 1);
990  ff_blend_rectangle(&s->draw, &s->dark, frame->data, frame->linesize,
991  frame->width, frame->height,
992  s->ox, s->oy, s->width, s->height + 20 * s->statistics);
993 
994  if (s->grid && outlink->h >= 10) {
995  ff_fill_rectangle(&s->draw, &s->gray, frame->data, frame->linesize,
996  s->ox, s->oy, s->width - 1, 1);
997 
998  for (i = 1; i < 5; i++) {
999  ff_fill_rectangle(&s->draw, &s->gray, frame->data, frame->linesize,
1000  s->ox, s->oy + i * (s->height - 1) / 4, s->width, 1);
1001  }
1002 
1003  for (i = 0; i < 10; i++) {
1004  ff_fill_rectangle(&s->draw, &s->gray, frame->data, frame->linesize,
1005  s->ox + i * (s->width - 1) / 10, s->oy, 1, s->height);
1006  }
1007 
1008  ff_fill_rectangle(&s->draw, &s->gray, frame->data, frame->linesize,
1009  s->ox + s->width - 1, s->oy, 1, s->height);
1010  }
1011 
1012  s->draw_trace(s, frame);
1013 
1014  for (i = 0; i < s->nb_values; i++) {
1015  for (c = 0; c < s->nb_comps; c++) {
1016  if ((1 << c) & s->components) {
1017  max[c] = FFMAX(max[c], s->values[i].p[s->rgba_map[c]]);
1018  min[c] = FFMIN(min[c], s->values[i].p[s->rgba_map[c]]);
1019  average[c] += s->values[i].p[s->rgba_map[c]];
1020  }
1021  }
1022  }
1023  for (c = 0; c < s->nb_comps; c++) {
1024  average[c] /= s->nb_values;
1025  }
1026 
1027  if (s->statistics && s->height > 10 && s->width > 280 * av_popcount(s->components)) {
1028  for (c = 0, i = 0; c < s->nb_comps; c++) {
1029  if ((1 << c) & s->components) {
1030  const char rgba[4] = { 'R', 'G', 'B', 'A' };
1031  const char yuva[4] = { 'Y', 'U', 'V', 'A' };
1032  char text[128];
1033 
1034  snprintf(text, sizeof(text), "%c avg:%.1f min:%d max:%d\n", s->is_rgb ? rgba[c] : yuva[c], average[c], min[c], max[c]);
1035  draw_text(&s->draw, frame, &s->white, s->ox + 2 + 280 * i++, s->oy + s->height + 4, text, 0);
1036  }
1037  }
1038  }
1039 
1040  return ff_filter_frame(outlink, frame);
1041 }
1042 
1043 static int oscilloscope_process_command(AVFilterContext *ctx, const char *cmd, const char *args,
1044  char *res, int res_len, int flags)
1045 {
1046  int ret;
1047 
1048  ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);
1049  if (ret < 0)
1050  return ret;
1051 
1052  update_oscilloscope(ctx);
1053 
1054  return 0;
1055 }
1056 
1057 static const AVFilterPad oscilloscope_inputs[] = {
1058  {
1059  .name = "default",
1060  .type = AVMEDIA_TYPE_VIDEO,
1061  .filter_frame = oscilloscope_filter_frame,
1062  .config_props = oscilloscope_config_input,
1063  .needs_writable = 1,
1064  },
1065  { NULL }
1066 };
1067 
1068 static const AVFilterPad oscilloscope_outputs[] = {
1069  {
1070  .name = "default",
1071  .type = AVMEDIA_TYPE_VIDEO,
1072  },
1073  { NULL }
1074 };
1075 
1076 AVFilter ff_vf_oscilloscope = {
1077  .name = "oscilloscope",
1078  .description = NULL_IF_CONFIG_SMALL("2D Video Oscilloscope."),
1079  .priv_size = sizeof(OscilloscopeContext),
1080  .priv_class = &oscilloscope_class,
1081  .query_formats = query_formats,
1082  .uninit = oscilloscope_uninit,
1083  .inputs = oscilloscope_inputs,
1084  .outputs = oscilloscope_outputs,
1085  .process_command = oscilloscope_process_command,
1086  .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,
1087 };