avf_showwaves.c
/*
 * Copyright (c) 2012 Stefano Sabatini
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * audio to video multimedia filter
 */

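/*
 * Example (illustrative, based on the ffmpeg-filters documentation): render a
 * scrolling 600x240 waveform video from an audio file with the command-line
 * tool, e.g.
 *
 *   ffmpeg -i input.mp3 -filter_complex "showwaves=s=600x240:mode=line" output.mp4
 */
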
#include "config_components.h"

#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/channel_layout.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include "avfilter.h"
#include "filters.h"
#include "formats.h"
#include "audio.h"
#include "video.h"
#include "internal.h"

enum ShowWavesMode {
    MODE_POINT,
    MODE_LINE,
    MODE_P2P,
    MODE_CENTERED_LINE,
    MODE_NB,
};

enum ShowWavesScale {
    SCALE_LIN,
    SCALE_LOG,
    SCALE_SQRT,
    SCALE_CBRT,
    SCALE_NB,
};

enum ShowWavesDrawMode {
    DRAW_SCALE,
    DRAW_FULL,
    DRAW_NB,
};

enum ShowWavesFilterMode {
    FILTER_AVERAGE,
    FILTER_PEAK,
    FILTER_NB,
};

struct frame_node {
    AVFrame *frame;
    struct frame_node *next;
};

typedef struct ShowWavesContext {
    const AVClass *class;
    int w, h;
    AVRational rate;
    char *colors;
    int buf_idx;
    int16_t *buf_idy;    /* y coordinate of previous sample for each channel */
    AVFrame *outpicref;
    int n;
    int pixstep;
    int sample_count_mod;
    int mode;                   ///< ShowWavesMode
    int scale;                  ///< ShowWavesScale
    int draw_mode;              ///< ShowWavesDrawMode
    int split_channels;
    int filter_mode;
    uint8_t *fg;

    int (*get_h)(int16_t sample, int height);
    void (*draw_sample)(uint8_t *buf, int height, int linesize,
                        int16_t *prev_y, const uint8_t color[4], int h);

    /* single picture */
    int single_pic;
    struct frame_node *audio_frames;
    struct frame_node *last_frame;
    int64_t total_samples;
    int64_t *sum; /* abs sum of the samples per channel */
} ShowWavesContext;

#define OFFSET(x) offsetof(ShowWavesContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption showwaves_options[] = {
    { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "s",    "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "mode", "select display mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=MODE_POINT}, 0, MODE_NB-1, FLAGS, "mode"},
        { "point", "draw a point for each sample",         0, AV_OPT_TYPE_CONST, {.i64=MODE_POINT},         .flags=FLAGS, .unit="mode"},
        { "line",  "draw a line for each sample",          0, AV_OPT_TYPE_CONST, {.i64=MODE_LINE},          .flags=FLAGS, .unit="mode"},
        { "p2p",   "draw a line between samples",          0, AV_OPT_TYPE_CONST, {.i64=MODE_P2P},           .flags=FLAGS, .unit="mode"},
        { "cline", "draw a centered line for each sample", 0, AV_OPT_TYPE_CONST, {.i64=MODE_CENTERED_LINE}, .flags=FLAGS, .unit="mode"},
    { "n",    "set how many samples to show in the same point", OFFSET(n), AV_OPT_TYPE_INT, {.i64 = 0}, 0, INT_MAX, FLAGS },
    { "rate", "set video rate", OFFSET(rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },
    { "r",    "set video rate", OFFSET(rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },
    { "split_channels", "draw channels separately", OFFSET(split_channels), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { "colors", "set channels colors", OFFSET(colors), AV_OPT_TYPE_STRING, {.str = "red|green|blue|yellow|orange|lime|pink|magenta|brown" }, 0, 0, FLAGS },
    { "scale", "set amplitude scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, SCALE_NB-1, FLAGS, .unit="scale" },
        { "lin",  "linear",      0, AV_OPT_TYPE_CONST, {.i64=SCALE_LIN},  .flags=FLAGS, .unit="scale"},
        { "log",  "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_LOG},  .flags=FLAGS, .unit="scale"},
        { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_SQRT}, .flags=FLAGS, .unit="scale"},
        { "cbrt", "cubic root",  0, AV_OPT_TYPE_CONST, {.i64=SCALE_CBRT}, .flags=FLAGS, .unit="scale"},
    { "draw", "set draw mode", OFFSET(draw_mode), AV_OPT_TYPE_INT, {.i64 = DRAW_SCALE}, 0, DRAW_NB-1, FLAGS, .unit="draw" },
        { "scale", "scale pixel values for each drawn sample", 0, AV_OPT_TYPE_CONST, {.i64=DRAW_SCALE}, .flags=FLAGS, .unit="draw"},
        { "full",  "draw every pixel for sample directly",     0, AV_OPT_TYPE_CONST, {.i64=DRAW_FULL},  .flags=FLAGS, .unit="draw"},
    { NULL }
};

AVFILTER_DEFINE_CLASS(showwaves);

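/* Release everything the filter instance allocated, including the queued
 * audio frames used by the single-picture (showwavespic) mode. */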
static av_cold void uninit(AVFilterContext *ctx)
{
    ShowWavesContext *showwaves = ctx->priv;

    av_frame_free(&showwaves->outpicref);
    av_freep(&showwaves->buf_idy);
    av_freep(&showwaves->fg);

    if (showwaves->single_pic) {
        struct frame_node *node = showwaves->audio_frames;
        while (node) {
            struct frame_node *tmp = node;

            node = node->next;
            av_frame_free(&tmp->frame);
            av_freep(&tmp);
        }
        av_freep(&showwaves->sum);
        showwaves->last_frame = NULL;
    }
}

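/* Negotiate formats: signed 16-bit audio on the input, GRAY8 or RGBA video
 * on the output. */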
static int query_formats(AVFilterContext *ctx)
{
    AVFilterFormats *formats = NULL;
    AVFilterChannelLayouts *layouts = NULL;
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    static const enum AVSampleFormat sample_fmts[] = { AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_NONE };
    static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_GRAY8, AV_PIX_FMT_RGBA, AV_PIX_FMT_NONE };
    int ret;

    /* set input audio formats */
    formats = ff_make_format_list(sample_fmts);
    if ((ret = ff_formats_ref(formats, &inlink->outcfg.formats)) < 0)
        return ret;

    layouts = ff_all_channel_layouts();
    if ((ret = ff_channel_layouts_ref(layouts, &inlink->outcfg.channel_layouts)) < 0)
        return ret;

    formats = ff_all_samplerates();
    if ((ret = ff_formats_ref(formats, &inlink->outcfg.samplerates)) < 0)
        return ret;

    /* set output video format */
    formats = ff_make_format_list(pix_fmts);
    if ((ret = ff_formats_ref(formats, &outlink->incfg.formats)) < 0)
        return ret;

    return 0;
}

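/* Amplitude mapping helpers. The get_*_h() variants map a signed sample to a
 * y coordinate around the channel mid-line (point, line and p2p modes); the
 * get_*_h2() variants map the sample magnitude to a bar height used by the
 * centered-line mode. One function pair exists per amplitude scale. */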
static int get_lin_h(int16_t sample, int height)
{
    return height/2 - av_rescale(sample, height/2, INT16_MAX);
}

static int get_lin_h2(int16_t sample, int height)
{
    return av_rescale(FFABS(sample), height, INT16_MAX);
}

static int get_log_h(int16_t sample, int height)
{
    return height/2 - FFSIGN(sample) * (log10(1 + FFABS(sample)) * (height/2) / log10(1 + INT16_MAX));
}

static int get_log_h2(int16_t sample, int height)
{
    return log10(1 + FFABS(sample)) * height / log10(1 + INT16_MAX);
}

static int get_sqrt_h(int16_t sample, int height)
{
    return height/2 - FFSIGN(sample) * (sqrt(FFABS(sample)) * (height/2) / sqrt(INT16_MAX));
}

static int get_sqrt_h2(int16_t sample, int height)
{
    return sqrt(FFABS(sample)) * height / sqrt(INT16_MAX);
}

static int get_cbrt_h(int16_t sample, int height)
{
    return height/2 - FFSIGN(sample) * (cbrt(FFABS(sample)) * (height/2) / cbrt(INT16_MAX));
}

static int get_cbrt_h2(int16_t sample, int height)
{
    return cbrt(FFABS(sample)) * height / cbrt(INT16_MAX);
}

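/* RGBA drawing callbacks. The *_scale variants add pre-scaled color
 * contributions so that overlapping samples accumulate (draw=scale), while
 * the *_full variants overwrite the pixel with the full color (draw=full). */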
static void draw_sample_point_rgba_scale(uint8_t *buf, int height, int linesize,
                                         int16_t *prev_y,
                                         const uint8_t color[4], int h)
{
    if (h >= 0 && h < height) {
        buf[h * linesize + 0] += color[0];
        buf[h * linesize + 1] += color[1];
        buf[h * linesize + 2] += color[2];
        buf[h * linesize + 3] += color[3];
    }
}

static void draw_sample_point_rgba_full(uint8_t *buf, int height, int linesize,
                                        int16_t *prev_y,
                                        const uint8_t color[4], int h)
{
    if (h >= 0 && h < height) {
        buf[h * linesize + 0] = color[0];
        buf[h * linesize + 1] = color[1];
        buf[h * linesize + 2] = color[2];
        buf[h * linesize + 3] = color[3];
    }
}

static void draw_sample_line_rgba_scale(uint8_t *buf, int height, int linesize,
                                        int16_t *prev_y,
                                        const uint8_t color[4], int h)
{
    int k;
    int start = height/2;
    int end   = av_clip(h, 0, height-1);
    if (start > end)
        FFSWAP(int16_t, start, end);
    for (k = start; k < end; k++) {
        buf[k * linesize + 0] += color[0];
        buf[k * linesize + 1] += color[1];
        buf[k * linesize + 2] += color[2];
        buf[k * linesize + 3] += color[3];
    }
}

static void draw_sample_line_rgba_full(uint8_t *buf, int height, int linesize,
                                       int16_t *prev_y,
                                       const uint8_t color[4], int h)
{
    int k;
    int start = height/2;
    int end   = av_clip(h, 0, height-1);
    if (start > end)
        FFSWAP(int16_t, start, end);
    for (k = start; k < end; k++) {
        buf[k * linesize + 0] = color[0];
        buf[k * linesize + 1] = color[1];
        buf[k * linesize + 2] = color[2];
        buf[k * linesize + 3] = color[3];
    }
}

static void draw_sample_p2p_rgba_scale(uint8_t *buf, int height, int linesize,
                                       int16_t *prev_y,
                                       const uint8_t color[4], int h)
{
    int k;
    if (h >= 0 && h < height) {
        buf[h * linesize + 0] += color[0];
        buf[h * linesize + 1] += color[1];
        buf[h * linesize + 2] += color[2];
        buf[h * linesize + 3] += color[3];
        if (*prev_y && h != *prev_y) {
            int start = *prev_y;
            int end = av_clip(h, 0, height-1);
            if (start > end)
                FFSWAP(int16_t, start, end);
            for (k = start + 1; k < end; k++) {
                buf[k * linesize + 0] += color[0];
                buf[k * linesize + 1] += color[1];
                buf[k * linesize + 2] += color[2];
                buf[k * linesize + 3] += color[3];
            }
        }
    }
    *prev_y = h;
}

static void draw_sample_p2p_rgba_full(uint8_t *buf, int height, int linesize,
                                      int16_t *prev_y,
                                      const uint8_t color[4], int h)
{
    int k;
    if (h >= 0 && h < height) {
        buf[h * linesize + 0] = color[0];
        buf[h * linesize + 1] = color[1];
        buf[h * linesize + 2] = color[2];
        buf[h * linesize + 3] = color[3];
        if (*prev_y && h != *prev_y) {
            int start = *prev_y;
            int end = av_clip(h, 0, height-1);
            if (start > end)
                FFSWAP(int16_t, start, end);
            for (k = start + 1; k < end; k++) {
                buf[k * linesize + 0] = color[0];
                buf[k * linesize + 1] = color[1];
                buf[k * linesize + 2] = color[2];
                buf[k * linesize + 3] = color[3];
            }
        }
    }
    *prev_y = h;
}

static void draw_sample_cline_rgba_scale(uint8_t *buf, int height, int linesize,
                                         int16_t *prev_y,
                                         const uint8_t color[4], int h)
{
    int k;
    const int start = (height - h) / 2;
    const int end   = start + h;
    for (k = start; k < end; k++) {
        buf[k * linesize + 0] += color[0];
        buf[k * linesize + 1] += color[1];
        buf[k * linesize + 2] += color[2];
        buf[k * linesize + 3] += color[3];
    }
}

static void draw_sample_cline_rgba_full(uint8_t *buf, int height, int linesize,
                                        int16_t *prev_y,
                                        const uint8_t color[4], int h)
{
    int k;
    const int start = (height - h) / 2;
    const int end   = start + h;
    for (k = start; k < end; k++) {
        buf[k * linesize + 0] = color[0];
        buf[k * linesize + 1] = color[1];
        buf[k * linesize + 2] = color[2];
        buf[k * linesize + 3] = color[3];
    }
}

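/* Grayscale drawing callbacks: same drawing logic as the RGBA variants, but
 * only the single luma byte per pixel is touched. */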
static void draw_sample_point_gray(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    if (h >= 0 && h < height)
        buf[h * linesize] += color[0];
}

static void draw_sample_line_gray(uint8_t *buf, int height, int linesize,
                                  int16_t *prev_y,
                                  const uint8_t color[4], int h)
{
    int k;
    int start = height/2;
    int end   = av_clip(h, 0, height-1);
    if (start > end)
        FFSWAP(int16_t, start, end);
    for (k = start; k < end; k++)
        buf[k * linesize] += color[0];
}

static void draw_sample_p2p_gray(uint8_t *buf, int height, int linesize,
                                 int16_t *prev_y,
                                 const uint8_t color[4], int h)
{
    int k;
    if (h >= 0 && h < height) {
        buf[h * linesize] += color[0];
        if (*prev_y && h != *prev_y) {
            int start = *prev_y;
            int end = av_clip(h, 0, height-1);
            if (start > end)
                FFSWAP(int16_t, start, end);
            for (k = start + 1; k < end; k++)
                buf[k * linesize] += color[0];
        }
    }
    *prev_y = h;
}

static void draw_sample_cline_gray(uint8_t *buf, int height, int linesize,
                                   int16_t *prev_y,
                                   const uint8_t color[4], int h)
{
    int k;
    const int start = (height - h) / 2;
    const int end   = start + h;
    for (k = start; k < end; k++)
        buf[k * linesize] += color[0];
}

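/* Configure the video output: derive the frame size, frame rate and samples
 * per column, pick the drawing and amplitude-mapping callbacks matching the
 * requested mode/scale/pixel format, and pre-compute the per-channel
 * foreground colors. */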
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    int nb_channels = inlink->ch_layout.nb_channels;
    char *colors, *saveptr = NULL;
    uint8_t x;
    int ch;

    if (showwaves->single_pic)
        showwaves->n = 1;

    if (!showwaves->n)
        showwaves->n = FFMAX(1, av_rescale_q(inlink->sample_rate, av_make_q(1, showwaves->w), showwaves->rate));

    showwaves->buf_idx = 0;
    if (!FF_ALLOCZ_TYPED_ARRAY(showwaves->buf_idy, nb_channels)) {
        av_log(ctx, AV_LOG_ERROR, "Could not allocate showwaves buffer\n");
        return AVERROR(ENOMEM);
    }
    outlink->w = showwaves->w;
    outlink->h = showwaves->h;
    outlink->sample_aspect_ratio = (AVRational){1,1};

    if (showwaves->single_pic)
        outlink->frame_rate = av_make_q(1, 1);
    else
        outlink->frame_rate = av_div_q((AVRational){inlink->sample_rate,showwaves->n},
                                       (AVRational){showwaves->w,1});

    av_log(ctx, AV_LOG_VERBOSE, "s:%dx%d r:%f n:%d\n",
           showwaves->w, showwaves->h, av_q2d(outlink->frame_rate), showwaves->n);

    switch (outlink->format) {
    case AV_PIX_FMT_GRAY8:
        switch (showwaves->mode) {
        case MODE_POINT:         showwaves->draw_sample = draw_sample_point_gray; break;
        case MODE_LINE:          showwaves->draw_sample = draw_sample_line_gray;  break;
        case MODE_P2P:           showwaves->draw_sample = draw_sample_p2p_gray;   break;
        case MODE_CENTERED_LINE: showwaves->draw_sample = draw_sample_cline_gray; break;
        default:
            return AVERROR_BUG;
        }
        showwaves->pixstep = 1;
        break;
    case AV_PIX_FMT_RGBA:
        switch (showwaves->mode) {
        case MODE_POINT:         showwaves->draw_sample = showwaves->draw_mode == DRAW_SCALE ? draw_sample_point_rgba_scale : draw_sample_point_rgba_full; break;
        case MODE_LINE:          showwaves->draw_sample = showwaves->draw_mode == DRAW_SCALE ? draw_sample_line_rgba_scale  : draw_sample_line_rgba_full;  break;
        case MODE_P2P:           showwaves->draw_sample = showwaves->draw_mode == DRAW_SCALE ? draw_sample_p2p_rgba_scale   : draw_sample_p2p_rgba_full;   break;
        case MODE_CENTERED_LINE: showwaves->draw_sample = showwaves->draw_mode == DRAW_SCALE ? draw_sample_cline_rgba_scale : draw_sample_cline_rgba_full; break;
        default:
            return AVERROR_BUG;
        }
        showwaves->pixstep = 4;
        break;
    }

    switch (showwaves->scale) {
    case SCALE_LIN:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_lin_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_lin_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_LOG:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_log_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_log_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_SQRT:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_sqrt_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_sqrt_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    case SCALE_CBRT:
        switch (showwaves->mode) {
        case MODE_POINT:
        case MODE_LINE:
        case MODE_P2P:           showwaves->get_h = get_cbrt_h;  break;
        case MODE_CENTERED_LINE: showwaves->get_h = get_cbrt_h2; break;
        default:
            return AVERROR_BUG;
        }
        break;
    }

    showwaves->fg = av_malloc_array(nb_channels, 4 * sizeof(*showwaves->fg));
    if (!showwaves->fg)
        return AVERROR(ENOMEM);

    colors = av_strdup(showwaves->colors);
    if (!colors)
        return AVERROR(ENOMEM);

    if (showwaves->draw_mode == DRAW_SCALE) {
        /* multiplication factor, pre-computed to avoid in-loop divisions */
        x = 255 / ((showwaves->split_channels ? 1 : nb_channels) * showwaves->n);
    } else {
        x = 255;
    }
    if (outlink->format == AV_PIX_FMT_RGBA) {
        uint8_t fg[4] = { 0xff, 0xff, 0xff, 0xff };

        for (ch = 0; ch < nb_channels; ch++) {
            char *color;

            color = av_strtok(ch == 0 ? colors : NULL, " |", &saveptr);
            if (color)
                av_parse_color(fg, color, -1, ctx);
            showwaves->fg[4*ch + 0] = fg[0] * x / 255.;
            showwaves->fg[4*ch + 1] = fg[1] * x / 255.;
            showwaves->fg[4*ch + 2] = fg[2] * x / 255.;
            showwaves->fg[4*ch + 3] = fg[3] * x / 255.;
        }
    } else {
        for (ch = 0; ch < nb_channels; ch++)
            showwaves->fg[4 * ch + 0] = x;
    }
    av_free(colors);

    return 0;
}

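/* Send the currently accumulated output frame downstream and reset the
 * column index and per-channel previous-y state for the next frame. */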
inline static int push_frame(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    ShowWavesContext *showwaves = outlink->src->priv;
    int nb_channels = inlink->ch_layout.nb_channels;
    int ret, i;

    ret = ff_filter_frame(outlink, showwaves->outpicref);
    showwaves->outpicref = NULL;
    showwaves->buf_idx = 0;
    for (i = 0; i < nb_channels; i++)
        showwaves->buf_idy[i] = 0;
    return ret;
}

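/* showwavespic: at EOF, walk the queued audio frames and draw one column per
 * output pixel, reducing the samples that fall into each column with either
 * their average or their peak absolute value. */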
static int push_single_pic(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    int64_t n = 0, column_max_samples = showwaves->total_samples / outlink->w;
    int64_t remaining_samples = showwaves->total_samples - (column_max_samples * outlink->w);
    int64_t last_column_samples = column_max_samples + remaining_samples;
    AVFrame *out = showwaves->outpicref;
    struct frame_node *node;
    const int nb_channels = inlink->ch_layout.nb_channels;
    const int ch_height = showwaves->split_channels ? outlink->h / nb_channels : outlink->h;
    const int linesize = out->linesize[0];
    const int pixstep = showwaves->pixstep;
    int col = 0;
    int64_t *sum = showwaves->sum;

    if (column_max_samples == 0) {
        av_log(ctx, AV_LOG_ERROR, "Too few samples\n");
        return AVERROR(EINVAL);
    }

    av_log(ctx, AV_LOG_DEBUG, "Create frame averaging %"PRId64" samples per column\n", column_max_samples);

    /* zero the whole per-channel accumulator array, not just nb_channels bytes */
    memset(sum, 0, nb_channels * sizeof(*sum));

    for (node = showwaves->audio_frames; node; node = node->next) {
        int i;
        const AVFrame *frame = node->frame;
        const int16_t *p = (const int16_t *)frame->data[0];

        for (i = 0; i < frame->nb_samples; i++) {
            int64_t max_samples = col == outlink->w - 1 ? last_column_samples: column_max_samples;
            int ch;

            switch (showwaves->filter_mode) {
            case FILTER_AVERAGE:
                for (ch = 0; ch < nb_channels; ch++)
                    sum[ch] += abs(p[ch + i*nb_channels]);
                break;
            case FILTER_PEAK:
                for (ch = 0; ch < nb_channels; ch++)
                    sum[ch] = FFMAX(sum[ch], abs(p[ch + i*nb_channels]));
                break;
            }

            n++;
            if (n == max_samples) {
                for (ch = 0; ch < nb_channels; ch++) {
                    int16_t sample = sum[ch] / (showwaves->filter_mode == FILTER_AVERAGE ? max_samples : 1);
                    uint8_t *buf = out->data[0] + col * pixstep;
                    int h;

                    if (showwaves->split_channels)
                        buf += ch*ch_height*linesize;
                    av_assert0(col < outlink->w);
                    h = showwaves->get_h(sample, ch_height);
                    showwaves->draw_sample(buf, ch_height, linesize, &showwaves->buf_idy[ch], &showwaves->fg[ch * 4], h);
                    sum[ch] = 0;
                }
                col++;
                n = 0;
            }
        }
    }

    return push_frame(outlink);
}

static int request_frame(AVFilterLink *outlink)
{
    ShowWavesContext *showwaves = outlink->src->priv;
    AVFilterLink *inlink = outlink->src->inputs[0];
    int ret;

    ret = ff_request_frame(inlink);
    if (ret == AVERROR_EOF && showwaves->outpicref) {
        if (showwaves->single_pic)
            push_single_pic(outlink);
        else
            push_frame(outlink);
    }

    return ret;
}

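/* Lazily allocate and clear the output frame if none is pending; its PTS is
 * derived from the position of p within the input audio frame. */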
static int alloc_out_frame(ShowWavesContext *showwaves, const int16_t *p,
                           const AVFilterLink *inlink, AVFilterLink *outlink,
                           const AVFrame *in)
{
    if (!showwaves->outpicref) {
        int j;
        AVFrame *out = showwaves->outpicref =
            ff_get_video_buffer(outlink, outlink->w, outlink->h);
        if (!out)
            return AVERROR(ENOMEM);
        out->width  = outlink->w;
        out->height = outlink->h;
        out->pts = in->pts + av_rescale_q((p - (int16_t *)in->data[0]) / inlink->ch_layout.nb_channels,
                                          av_make_q(1, inlink->sample_rate),
                                          outlink->time_base);
        for (j = 0; j < outlink->h; j++)
            memset(out->data[0] + j*out->linesize[0], 0, outlink->w * showwaves->pixstep);
    }
    return 0;
}

static av_cold int init(AVFilterContext *ctx)
{
    ShowWavesContext *showwaves = ctx->priv;

    if (!strcmp(ctx->filter->name, "showwavespic")) {
        showwaves->single_pic = 1;
        showwaves->mode = MODE_CENTERED_LINE;
    }

    return 0;
}

#if CONFIG_SHOWWAVES_FILTER

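/* Draw the incoming audio samples into the current output frame, advancing
 * one output column every n samples, and push a frame whenever a full width
 * has been drawn (or at the end of input). */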
static int showwaves_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    const int nb_samples = insamples->nb_samples;
    AVFrame *outpicref = showwaves->outpicref;
    int16_t *p = (int16_t *)insamples->data[0];
    int nb_channels = inlink->ch_layout.nb_channels;
    int i, j, ret = 0;
    const int pixstep = showwaves->pixstep;
    const int n = showwaves->n;
    const int ch_height = showwaves->split_channels ? outlink->h / nb_channels : outlink->h;

    /* draw data in the buffer */
    for (i = 0; i < nb_samples; i++) {

        ret = alloc_out_frame(showwaves, p, inlink, outlink, insamples);
        if (ret < 0)
            goto end;
        outpicref = showwaves->outpicref;

        for (j = 0; j < nb_channels; j++) {
            uint8_t *buf = outpicref->data[0] + showwaves->buf_idx * pixstep;
            const int linesize = outpicref->linesize[0];
            int h;

            if (showwaves->split_channels)
                buf += j*ch_height*linesize;
            h = showwaves->get_h(*p++, ch_height);
            showwaves->draw_sample(buf, ch_height, linesize,
                                   &showwaves->buf_idy[j], &showwaves->fg[j * 4], h);
        }

        showwaves->sample_count_mod++;
        if (showwaves->sample_count_mod == n) {
            showwaves->sample_count_mod = 0;
            showwaves->buf_idx++;
        }
        if (showwaves->buf_idx == showwaves->w ||
            (ff_outlink_get_status(inlink) && i == nb_samples - 1))
            if ((ret = push_frame(outlink)) < 0)
                break;
        outpicref = showwaves->outpicref;
    }

end:
    av_frame_free(&insamples);
    return ret;
}

static int activate(AVFilterContext *ctx)
{
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    AVFrame *in;
    const int nb_samples = showwaves->n * outlink->w;
    int ret;

    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);

    ret = ff_inlink_consume_samples(inlink, nb_samples, nb_samples, &in);
    if (ret < 0)
        return ret;
    if (ret > 0)
        return showwaves_filter_frame(inlink, in);

    FF_FILTER_FORWARD_STATUS(inlink, outlink);
    FF_FILTER_FORWARD_WANTED(outlink, inlink);

    return FFERROR_NOT_READY;
}

static const AVFilterPad showwaves_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_AUDIO,
    },
};

static const AVFilterPad showwaves_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
};

const AVFilter ff_avf_showwaves = {
    .name          = "showwaves",
    .description   = NULL_IF_CONFIG_SMALL("Convert input audio to a video output."),
    .init          = init,
    .uninit        = uninit,
    .priv_size     = sizeof(ShowWavesContext),
    FILTER_INPUTS(showwaves_inputs),
    .activate      = activate,
    FILTER_OUTPUTS(showwaves_outputs),
    FILTER_QUERY_FUNC(query_formats),
    .priv_class    = &showwaves_class,
};

#endif // CONFIG_SHOWWAVES_FILTER

#if CONFIG_SHOWWAVESPIC_FILTER

#define OFFSET(x) offsetof(ShowWavesContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption showwavespic_options[] = {
    { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "s",    "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "600x240"}, 0, 0, FLAGS },
    { "split_channels", "draw channels separately", OFFSET(split_channels), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
    { "colors", "set channels colors", OFFSET(colors), AV_OPT_TYPE_STRING, {.str = "red|green|blue|yellow|orange|lime|pink|magenta|brown" }, 0, 0, FLAGS },
    { "scale", "set amplitude scale", OFFSET(scale), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, SCALE_NB-1, FLAGS, .unit="scale" },
        { "lin",  "linear",      0, AV_OPT_TYPE_CONST, {.i64=SCALE_LIN},  .flags=FLAGS, .unit="scale"},
        { "log",  "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_LOG},  .flags=FLAGS, .unit="scale"},
        { "sqrt", "square root", 0, AV_OPT_TYPE_CONST, {.i64=SCALE_SQRT}, .flags=FLAGS, .unit="scale"},
        { "cbrt", "cubic root",  0, AV_OPT_TYPE_CONST, {.i64=SCALE_CBRT}, .flags=FLAGS, .unit="scale"},
    { "draw", "set draw mode", OFFSET(draw_mode), AV_OPT_TYPE_INT, {.i64 = DRAW_SCALE}, 0, DRAW_NB-1, FLAGS, .unit="draw" },
        { "scale", "scale pixel values for each drawn sample", 0, AV_OPT_TYPE_CONST, {.i64=DRAW_SCALE}, .flags=FLAGS, .unit="draw"},
        { "full",  "draw every pixel for sample directly",     0, AV_OPT_TYPE_CONST, {.i64=DRAW_FULL},  .flags=FLAGS, .unit="draw"},
    { "filter", "set filter mode", OFFSET(filter_mode), AV_OPT_TYPE_INT, {.i64 = FILTER_AVERAGE}, 0, FILTER_NB-1, FLAGS, .unit="filter" },
        { "average", "use average samples", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_AVERAGE}, .flags=FLAGS, .unit="filter"},
        { "peak",    "use peak samples",    0, AV_OPT_TYPE_CONST, {.i64=FILTER_PEAK},    .flags=FLAGS, .unit="filter"},
    { NULL }
};

AVFILTER_DEFINE_CLASS(showwavespic);

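/* Allocate the per-channel accumulator used by the average/peak reduction in
 * push_single_pic(). */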
static int showwavespic_config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    ShowWavesContext *showwaves = ctx->priv;

    if (showwaves->single_pic) {
        showwaves->sum = av_calloc(inlink->ch_layout.nb_channels, sizeof(*showwaves->sum));
        if (!showwaves->sum)
            return AVERROR(ENOMEM);
    }

    return 0;
}

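/* Queue the incoming audio frames instead of drawing them right away; the
 * picture is rendered in one pass by push_single_pic() when EOF is reached
 * via request_frame(). */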
static int showwavespic_filter_frame(AVFilterLink *inlink, AVFrame *insamples)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    ShowWavesContext *showwaves = ctx->priv;
    int16_t *p = (int16_t *)insamples->data[0];
    int ret = 0;

    if (showwaves->single_pic) {
        struct frame_node *f;

        ret = alloc_out_frame(showwaves, p, inlink, outlink, insamples);
        if (ret < 0)
            goto end;

        /* queue the audio frame */
        f = av_malloc(sizeof(*f));
        if (!f) {
            ret = AVERROR(ENOMEM);
            goto end;
        }
        f->frame = insamples;
        f->next  = NULL;
        if (!showwaves->last_frame) {
            showwaves->audio_frames =
            showwaves->last_frame   = f;
        } else {
            showwaves->last_frame->next = f;
            showwaves->last_frame = f;
        }
        showwaves->total_samples += insamples->nb_samples;

        return 0;
    }

end:
    av_frame_free(&insamples);
    return ret;
}

static const AVFilterPad showwavespic_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_AUDIO,
        .config_props = showwavespic_config_input,
        .filter_frame = showwavespic_filter_frame,
    },
};

static const AVFilterPad showwavespic_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
        .request_frame = request_frame,
    },
};

const AVFilter ff_avf_showwavespic = {
    .name          = "showwavespic",
    .description   = NULL_IF_CONFIG_SMALL("Convert input audio to a video output single picture."),
    .init          = init,
    .uninit        = uninit,
    .priv_size     = sizeof(ShowWavesContext),
    FILTER_INPUTS(showwavespic_inputs),
    FILTER_OUTPUTS(showwavespic_outputs),
    FILTER_QUERY_FUNC(query_formats),
    .priv_class    = &showwavespic_class,
};

#endif // CONFIG_SHOWWAVESPIC_FILTER
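
/*
 * Example (illustrative, based on the ffmpeg-filters documentation): render a
 * single waveform image for a whole file, e.g.
 *
 *   ffmpeg -i input.mp3 -filter_complex "showwavespic=s=600x240:split_channels=1" -frames:v 1 output.png
 */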