FFmpeg
vf_showinfo.c
/*
 * Copyright (c) 2011 Stefano Sabatini
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * filter for showing textual video frame information
 */

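/*
 * Example (a minimal usage sketch): dump per-frame information for an input
 * file while discarding the decoded output:
 *
 *     ffmpeg -i INPUT -vf showinfo -f null -
 *
 * All output goes to the log at info level; the per-plane Adler-32 checksums
 * can be disabled with "showinfo=checksum=0".
 */
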
#include <inttypes.h>

#include "libavutil/bswap.h"
#include "libavutil/adler32.h"
#include "libavutil/display.h"
#include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/hdr_dynamic_metadata.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/spherical.h"
#include "libavutil/stereo3d.h"
#include "libavutil/timestamp.h"
#include "libavutil/timecode.h"
#include "libavutil/mastering_display_metadata.h"
#include "libavutil/video_enc_params.h"

#include "avfilter.h"
#include "internal.h"
#include "video.h"

typedef struct ShowInfoContext {
    const AVClass *class;
    int calculate_checksums;
} ShowInfoContext;

#define OFFSET(x) offsetof(ShowInfoContext, x)
#define VF AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

static const AVOption showinfo_options[] = {
    { "checksum", "calculate checksums", OFFSET(calculate_checksums), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, VF },
    { NULL }
};

AVFILTER_DEFINE_CLASS(showinfo);

static void dump_spherical(AVFilterContext *ctx, AVFrame *frame, const AVFrameSideData *sd)
{
    const AVSphericalMapping *spherical = (const AVSphericalMapping *)sd->data;
    double yaw, pitch, roll;

    av_log(ctx, AV_LOG_INFO, "spherical information: ");
    if (sd->size < sizeof(*spherical)) {
        av_log(ctx, AV_LOG_ERROR, "invalid data\n");
        return;
    }

    if (spherical->projection == AV_SPHERICAL_EQUIRECTANGULAR)
        av_log(ctx, AV_LOG_INFO, "equirectangular ");
    else if (spherical->projection == AV_SPHERICAL_CUBEMAP)
        av_log(ctx, AV_LOG_INFO, "cubemap ");
    else if (spherical->projection == AV_SPHERICAL_EQUIRECTANGULAR_TILE)
        av_log(ctx, AV_LOG_INFO, "tiled equirectangular ");
    else {
        av_log(ctx, AV_LOG_WARNING, "unknown\n");
        return;
    }

    /* yaw/pitch/roll are stored as 16.16 fixed-point degrees */
    yaw   = ((double)spherical->yaw)   / (1 << 16);
    pitch = ((double)spherical->pitch) / (1 << 16);
    roll  = ((double)spherical->roll)  / (1 << 16);
    av_log(ctx, AV_LOG_INFO, "(%f/%f/%f) ", yaw, pitch, roll);

    if (spherical->projection == AV_SPHERICAL_EQUIRECTANGULAR_TILE) {
        size_t l, t, r, b;
        av_spherical_tile_bounds(spherical, frame->width, frame->height,
                                 &l, &t, &r, &b);
        av_log(ctx, AV_LOG_INFO,
               "[%"SIZE_SPECIFIER", %"SIZE_SPECIFIER", %"SIZE_SPECIFIER", %"SIZE_SPECIFIER"] ",
               l, t, r, b);
    } else if (spherical->projection == AV_SPHERICAL_CUBEMAP) {
        av_log(ctx, AV_LOG_INFO, "[pad %"PRIu32"] ", spherical->padding);
    }
}

static void dump_stereo3d(AVFilterContext *ctx, const AVFrameSideData *sd)
{
    const AVStereo3D *stereo;

    av_log(ctx, AV_LOG_INFO, "stereoscopic information: ");
    if (sd->size < sizeof(*stereo)) {
        av_log(ctx, AV_LOG_ERROR, "invalid data\n");
        return;
    }

    stereo = (const AVStereo3D *)sd->data;

    av_log(ctx, AV_LOG_INFO, "type - %s", av_stereo3d_type_name(stereo->type));

    if (stereo->flags & AV_STEREO3D_FLAG_INVERT)
        av_log(ctx, AV_LOG_INFO, " (inverted)");
}

static void dump_s12m_timecode(AVFilterContext *ctx, AVRational frame_rate, const AVFrameSideData *sd)
{
    const uint32_t *tc = (const uint32_t *)sd->data;

    /* tc[0] holds the number of timecodes (at most 3), tc[1..3] the timecodes themselves */
    if ((sd->size != sizeof(uint32_t) * 4) || (tc[0] > 3)) {
        av_log(ctx, AV_LOG_ERROR, "invalid data\n");
        return;
    }

    for (int j = 1; j <= tc[0]; j++) {
        char tcbuf[AV_TIMECODE_STR_SIZE];
        av_timecode_make_smpte_tc_string2(tcbuf, frame_rate, tc[j], 0, 0);
        av_log(ctx, AV_LOG_INFO, "timecode - %s%s", tcbuf, j != tc[0] ? ", " : "");
    }
}

static void dump_roi(AVFilterContext *ctx, const AVFrameSideData *sd)
{
    int nb_rois;
    const AVRegionOfInterest *roi;
    uint32_t roi_size;

    roi = (const AVRegionOfInterest *)sd->data;
    roi_size = roi->self_size;
    if (!roi_size || sd->size % roi_size != 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid AVRegionOfInterest.self_size.\n");
        return;
    }
    nb_rois = sd->size / roi_size;

    av_log(ctx, AV_LOG_INFO, "Regions Of Interest(RoI) information: ");
    for (int i = 0; i < nb_rois; i++) {
        roi = (const AVRegionOfInterest *)(sd->data + roi_size * i);
        av_log(ctx, AV_LOG_INFO, "index: %d, region: (%d, %d)/(%d, %d), qp offset: %d/%d.\n",
               i, roi->left, roi->top, roi->right, roi->bottom, roi->qoffset.num, roi->qoffset.den);
    }
}

static void dump_mastering_display(AVFilterContext *ctx, const AVFrameSideData *sd)
{
    const AVMasteringDisplayMetadata *mastering_display;

    av_log(ctx, AV_LOG_INFO, "mastering display: ");
    if (sd->size < sizeof(*mastering_display)) {
        av_log(ctx, AV_LOG_ERROR, "invalid data\n");
        return;
    }

    mastering_display = (const AVMasteringDisplayMetadata *)sd->data;

    av_log(ctx, AV_LOG_INFO, "has_primaries:%d has_luminance:%d "
           "r(%5.4f,%5.4f) g(%5.4f,%5.4f) b(%5.4f %5.4f) wp(%5.4f, %5.4f) "
           "min_luminance=%f, max_luminance=%f",
           mastering_display->has_primaries, mastering_display->has_luminance,
           av_q2d(mastering_display->display_primaries[0][0]),
           av_q2d(mastering_display->display_primaries[0][1]),
           av_q2d(mastering_display->display_primaries[1][0]),
           av_q2d(mastering_display->display_primaries[1][1]),
           av_q2d(mastering_display->display_primaries[2][0]),
           av_q2d(mastering_display->display_primaries[2][1]),
           av_q2d(mastering_display->white_point[0]), av_q2d(mastering_display->white_point[1]),
           av_q2d(mastering_display->min_luminance), av_q2d(mastering_display->max_luminance));
}

static void dump_dynamic_hdr_plus(AVFilterContext *ctx, AVFrameSideData *sd)
{
    AVDynamicHDRPlus *hdr_plus;

    av_log(ctx, AV_LOG_INFO, "HDR10+ metadata: ");
    if (sd->size < sizeof(*hdr_plus)) {
        av_log(ctx, AV_LOG_ERROR, "invalid data\n");
        return;
    }

    hdr_plus = (AVDynamicHDRPlus *)sd->data;
    av_log(ctx, AV_LOG_INFO, "application version: %d, ", hdr_plus->application_version);
    av_log(ctx, AV_LOG_INFO, "num_windows: %d, ", hdr_plus->num_windows);
    for (int w = 1; w < hdr_plus->num_windows; w++) {
        AVHDRPlusColorTransformParams *params = &hdr_plus->params[w];
        av_log(ctx, AV_LOG_INFO, "window %d { ", w);
        av_log(ctx, AV_LOG_INFO, "window_upper_left_corner: (%5.4f,%5.4f),",
               av_q2d(params->window_upper_left_corner_x),
               av_q2d(params->window_upper_left_corner_y));
        av_log(ctx, AV_LOG_INFO, "window_lower_right_corner: (%5.4f,%5.4f), ",
               av_q2d(params->window_lower_right_corner_x),
               av_q2d(params->window_lower_right_corner_y));
        av_log(ctx, AV_LOG_INFO, "window_upper_left_corner: (%5.4f, %5.4f), ",
               av_q2d(params->window_upper_left_corner_x),
               av_q2d(params->window_upper_left_corner_y));
        av_log(ctx, AV_LOG_INFO, "center_of_ellipse_x: (%d,%d), ",
               params->center_of_ellipse_x,
               params->center_of_ellipse_y);
        av_log(ctx, AV_LOG_INFO, "rotation_angle: %d, ",
               params->rotation_angle);
        av_log(ctx, AV_LOG_INFO, "semimajor_axis_internal_ellipse: %d, ",
               params->semimajor_axis_internal_ellipse);
        av_log(ctx, AV_LOG_INFO, "semimajor_axis_external_ellipse: %d, ",
               params->semimajor_axis_external_ellipse);
        av_log(ctx, AV_LOG_INFO, "semiminor_axis_external_ellipse: %d, ",
               params->semiminor_axis_external_ellipse);
        av_log(ctx, AV_LOG_INFO, "overlap_process_option: %d}, ",
               params->overlap_process_option);
    }
    av_log(ctx, AV_LOG_INFO, "targeted_system_display_maximum_luminance: %9.4f, ",
           av_q2d(hdr_plus->targeted_system_display_maximum_luminance));
    if (hdr_plus->targeted_system_display_actual_peak_luminance_flag) {
        av_log(ctx, AV_LOG_INFO, "targeted_system_display_actual_peak_luminance: {");
        for (int i = 0; i < hdr_plus->num_rows_targeted_system_display_actual_peak_luminance; i++) {
            av_log(ctx, AV_LOG_INFO, "(");
            for (int j = 0; j < hdr_plus->num_cols_targeted_system_display_actual_peak_luminance; j++) {
                av_log(ctx, AV_LOG_INFO, "%5.4f,",
                       av_q2d(hdr_plus->targeted_system_display_actual_peak_luminance[i][j]));
            }
            av_log(ctx, AV_LOG_INFO, ")");
        }
        av_log(ctx, AV_LOG_INFO, "}, ");
    }

    for (int w = 0; w < hdr_plus->num_windows; w++) {
        AVHDRPlusColorTransformParams *params = &hdr_plus->params[w];
        av_log(ctx, AV_LOG_INFO, "window %d {maxscl: {", w);
        for (int i = 0; i < 3; i++) {
            av_log(ctx, AV_LOG_INFO, "%5.4f,", av_q2d(params->maxscl[i]));
        }
        av_log(ctx, AV_LOG_INFO, "} average_maxrgb: %5.4f, ",
               av_q2d(params->average_maxrgb));
        av_log(ctx, AV_LOG_INFO, "distribution_maxrgb: {");
        for (int i = 0; i < params->num_distribution_maxrgb_percentiles; i++) {
            av_log(ctx, AV_LOG_INFO, "(%d,%5.4f)",
                   params->distribution_maxrgb[i].percentage,
                   av_q2d(params->distribution_maxrgb[i].percentile));
        }
        av_log(ctx, AV_LOG_INFO, "} fraction_bright_pixels: %5.4f, ",
               av_q2d(params->fraction_bright_pixels));
        if (params->tone_mapping_flag) {
            av_log(ctx, AV_LOG_INFO, "knee_point: (%5.4f,%5.4f), ", av_q2d(params->knee_point_x), av_q2d(params->knee_point_y));
            av_log(ctx, AV_LOG_INFO, "bezier_curve_anchors: {");
            for (int i = 0; i < params->num_bezier_curve_anchors; i++) {
                av_log(ctx, AV_LOG_INFO, "%5.4f,",
                       av_q2d(params->bezier_curve_anchors[i]));
            }
            av_log(ctx, AV_LOG_INFO, "} ");
        }
        if (params->color_saturation_mapping_flag) {
            av_log(ctx, AV_LOG_INFO, "color_saturation_weight: %5.4f",
                   av_q2d(params->color_saturation_weight));
        }
        av_log(ctx, AV_LOG_INFO, "} ");
    }

    if (hdr_plus->mastering_display_actual_peak_luminance_flag) {
        av_log(ctx, AV_LOG_INFO, "mastering_display_actual_peak_luminance: {");
        for (int i = 0; i < hdr_plus->num_rows_mastering_display_actual_peak_luminance; i++) {
            av_log(ctx, AV_LOG_INFO, "(");
            for (int j = 0; j < hdr_plus->num_cols_mastering_display_actual_peak_luminance; j++) {
                av_log(ctx, AV_LOG_INFO, " %5.4f,",
                       av_q2d(hdr_plus->mastering_display_actual_peak_luminance[i][j]));
            }
            av_log(ctx, AV_LOG_INFO, ")");
        }
        av_log(ctx, AV_LOG_INFO, "} ");
    }
}

static void dump_content_light_metadata(AVFilterContext *ctx, AVFrameSideData *sd)
{
    const AVContentLightMetadata *metadata = (const AVContentLightMetadata *)sd->data;

    av_log(ctx, AV_LOG_INFO, "Content Light Level information: "
           "MaxCLL=%d, MaxFALL=%d",
           metadata->MaxCLL, metadata->MaxFALL);
}

static void dump_video_enc_params(AVFilterContext *ctx, const AVFrameSideData *sd)
{
    const AVVideoEncParams *par = (const AVVideoEncParams *)sd->data;
    int plane, acdc;

    av_log(ctx, AV_LOG_INFO, "video encoding parameters: type %d; ", par->type);
    if (par->qp)
        av_log(ctx, AV_LOG_INFO, "qp=%d; ", par->qp);
    for (plane = 0; plane < FF_ARRAY_ELEMS(par->delta_qp); plane++)
        for (acdc = 0; acdc < FF_ARRAY_ELEMS(par->delta_qp[plane]); acdc++) {
            int delta_qp = par->delta_qp[plane][acdc];
            if (delta_qp)
                av_log(ctx, AV_LOG_INFO, "delta_qp[%d][%d]=%d; ",
                       plane, acdc, delta_qp);
        }
    if (par->nb_blocks)
        av_log(ctx, AV_LOG_INFO, "%u blocks; ", par->nb_blocks);
}

static void dump_sei_unregistered_metadata(AVFilterContext *ctx, const AVFrameSideData *sd)
{
    const int uuid_size = 16;
    const uint8_t *user_data = sd->data;
    int i;

    if (sd->size < uuid_size) {
        av_log(ctx, AV_LOG_ERROR, "invalid data(%d < UUID(%d-bytes))\n", sd->size, uuid_size);
        return;
    }

    av_log(ctx, AV_LOG_INFO, "User Data Unregistered:\n");
    av_log(ctx, AV_LOG_INFO, "UUID=");
    for (i = 0; i < uuid_size; i++) {
        av_log(ctx, AV_LOG_INFO, "%02x", user_data[i]);
        if (i == 3 || i == 5 || i == 7 || i == 9)
            av_log(ctx, AV_LOG_INFO, "-");
    }
    av_log(ctx, AV_LOG_INFO, "\n");

    av_log(ctx, AV_LOG_INFO, "User Data=");
    for (; i < sd->size; i++) {
        av_log(ctx, AV_LOG_INFO, "%02x", user_data[i]);
    }
    av_log(ctx, AV_LOG_INFO, "\n");
}

static void dump_color_property(AVFilterContext *ctx, AVFrame *frame)
{
    const char *color_range_str     = av_color_range_name(frame->color_range);
    const char *colorspace_str      = av_color_space_name(frame->colorspace);
    const char *color_primaries_str = av_color_primaries_name(frame->color_primaries);
    const char *color_trc_str       = av_color_transfer_name(frame->color_trc);

    if (!color_range_str || frame->color_range == AVCOL_RANGE_UNSPECIFIED) {
        av_log(ctx, AV_LOG_INFO, "color_range:unknown");
    } else {
        av_log(ctx, AV_LOG_INFO, "color_range:%s", color_range_str);
    }

    if (!colorspace_str || frame->colorspace == AVCOL_SPC_UNSPECIFIED) {
        av_log(ctx, AV_LOG_INFO, " color_space:unknown");
    } else {
        av_log(ctx, AV_LOG_INFO, " color_space:%s", colorspace_str);
    }

    if (!color_primaries_str || frame->color_primaries == AVCOL_PRI_UNSPECIFIED) {
        av_log(ctx, AV_LOG_INFO, " color_primaries:unknown");
    } else {
        av_log(ctx, AV_LOG_INFO, " color_primaries:%s", color_primaries_str);
    }

    if (!color_trc_str || frame->color_trc == AVCOL_TRC_UNSPECIFIED) {
        av_log(ctx, AV_LOG_INFO, " color_trc:unknown");
    } else {
        av_log(ctx, AV_LOG_INFO, " color_trc:%s", color_trc_str);
    }
    av_log(ctx, AV_LOG_INFO, "\n");
}

static void update_sample_stats_8(const uint8_t *src, int len, int64_t *sum, int64_t *sum2)
{
    int i;

    for (i = 0; i < len; i++) {
        *sum  += src[i];
        *sum2 += src[i] * src[i];
    }
}

static void update_sample_stats_16(int be, const uint8_t *src, int len, int64_t *sum, int64_t *sum2)
{
    const uint16_t *src1 = (const uint16_t *)src;
    int i;

    for (i = 0; i < len / 2; i++) {
        if ((HAVE_BIGENDIAN && !be) || (!HAVE_BIGENDIAN && be)) {
            *sum  += av_bswap16(src1[i]);
            *sum2 += (uint32_t)av_bswap16(src1[i]) * (uint32_t)av_bswap16(src1[i]);
        } else {
            *sum  += src1[i];
            *sum2 += (uint32_t)src1[i] * (uint32_t)src1[i];
        }
    }
}

static void update_sample_stats(int depth, int be, const uint8_t *src, int len, int64_t *sum, int64_t *sum2)
{
    if (depth <= 8)
        update_sample_stats_8(src, len, sum, sum2);
    else
        update_sample_stats_16(be, src, len, sum, sum2);
}

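/*
 * Note (a sketch of the arithmetic, derived from the code below): with N
 * samples accumulated per plane, filter_frame() reports
 *     mean  = sum / N                          (rounded)
 *     stdev = sqrt(sum2 / N - (sum / N)^2)
 *           = sqrt((sum2 - sum * sum / N) / N)
 * which is the expression used when printing the "stdev:[...]" values.
 */
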
static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    AVFilterContext *ctx = inlink->dst;
    ShowInfoContext *s = ctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
    uint32_t plane_checksum[4] = {0}, checksum = 0;
    int64_t sum[4] = {0}, sum2[4] = {0};
    int32_t pixelcount[4] = {0};
    int bitdepth = desc->comp[0].depth;
    int be = desc->flags & AV_PIX_FMT_FLAG_BE;
    int i, plane, vsub = desc->log2_chroma_h;

    for (plane = 0; plane < 4 && s->calculate_checksums && frame->data[plane] && frame->linesize[plane]; plane++) {
        uint8_t *data = frame->data[plane];
        int h = plane == 1 || plane == 2 ? AV_CEIL_RSHIFT(inlink->h, vsub) : inlink->h;
        int linesize = av_image_get_linesize(frame->format, frame->width, plane);
        int width = linesize >> (bitdepth > 8);

        if (linesize < 0)
            return linesize;

        for (i = 0; i < h; i++) {
            plane_checksum[plane] = av_adler32_update(plane_checksum[plane], data, linesize);
            checksum = av_adler32_update(checksum, data, linesize);

            update_sample_stats(bitdepth, be, data, linesize, sum+plane, sum2+plane);
            pixelcount[plane] += width;
            data += frame->linesize[plane];
        }
    }

    av_log(ctx, AV_LOG_INFO,
           "n:%4"PRId64" pts:%7s pts_time:%-7s pos:%9"PRId64" "
           "fmt:%s sar:%d/%d s:%dx%d i:%c iskey:%d type:%c ",
           inlink->frame_count_out,
           av_ts2str(frame->pts), av_ts2timestr(frame->pts, &inlink->time_base), frame->pkt_pos,
           desc->name,
           frame->sample_aspect_ratio.num, frame->sample_aspect_ratio.den,
           frame->width, frame->height,
           !frame->interlaced_frame ? 'P' :         /* Progressive  */
           frame->top_field_first   ? 'T' : 'B',    /* Top / Bottom */
           frame->key_frame,
           av_get_picture_type_char(frame->pict_type));

    if (s->calculate_checksums) {
        av_log(ctx, AV_LOG_INFO,
               "checksum:%08"PRIX32" plane_checksum:[%08"PRIX32,
               checksum, plane_checksum[0]);

        for (plane = 1; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++)
            av_log(ctx, AV_LOG_INFO, " %08"PRIX32, plane_checksum[plane]);
        av_log(ctx, AV_LOG_INFO, "] mean:[");
        for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++)
            av_log(ctx, AV_LOG_INFO, "%"PRId64" ", (sum[plane] + pixelcount[plane]/2) / pixelcount[plane]);
        av_log(ctx, AV_LOG_INFO, "\b] stdev:[");
        for (plane = 0; plane < 4 && frame->data[plane] && frame->linesize[plane]; plane++)
            av_log(ctx, AV_LOG_INFO, "%3.1f ",
                   sqrt((sum2[plane] - sum[plane]*(double)sum[plane]/pixelcount[plane])/pixelcount[plane]));
        av_log(ctx, AV_LOG_INFO, "\b]");
    }
    av_log(ctx, AV_LOG_INFO, "\n");

    for (i = 0; i < frame->nb_side_data; i++) {
        AVFrameSideData *sd = frame->side_data[i];

        av_log(ctx, AV_LOG_INFO, "  side data - ");
        switch (sd->type) {
        case AV_FRAME_DATA_PANSCAN:
            av_log(ctx, AV_LOG_INFO, "pan/scan");
            break;
        case AV_FRAME_DATA_A53_CC:
            av_log(ctx, AV_LOG_INFO, "A/53 closed captions (%d bytes)", sd->size);
            break;
        case AV_FRAME_DATA_SPHERICAL:
            dump_spherical(ctx, frame, sd);
            break;
        case AV_FRAME_DATA_STEREO3D:
            dump_stereo3d(ctx, sd);
            break;
        case AV_FRAME_DATA_S12M_TIMECODE: {
            dump_s12m_timecode(ctx, inlink->frame_rate, sd);
            break;
        }
        case AV_FRAME_DATA_DISPLAYMATRIX:
            av_log(ctx, AV_LOG_INFO, "displaymatrix: rotation of %.2f degrees",
                   av_display_rotation_get((int32_t *)sd->data));
            break;
        case AV_FRAME_DATA_AFD:
            av_log(ctx, AV_LOG_INFO, "afd: value of %"PRIu8, sd->data[0]);
            break;
        case AV_FRAME_DATA_REGIONS_OF_INTEREST:
            dump_roi(ctx, sd);
            break;
        case AV_FRAME_DATA_MASTERING_DISPLAY_METADATA:
            dump_mastering_display(ctx, sd);
            break;
        case AV_FRAME_DATA_DYNAMIC_HDR_PLUS:
            dump_dynamic_hdr_plus(ctx, sd);
            break;
        case AV_FRAME_DATA_CONTENT_LIGHT_LEVEL:
            dump_content_light_metadata(ctx, sd);
            break;
        case AV_FRAME_DATA_GOP_TIMECODE: {
            char tcbuf[AV_TIMECODE_STR_SIZE];
            av_timecode_make_mpeg_tc_string(tcbuf, *(int64_t *)(sd->data));
            av_log(ctx, AV_LOG_INFO, "GOP timecode - %s", tcbuf);
            break;
        }
        case AV_FRAME_DATA_VIDEO_ENC_PARAMS:
            dump_video_enc_params(ctx, sd);
            break;
        case AV_FRAME_DATA_SEI_UNREGISTERED:
            dump_sei_unregistered_metadata(ctx, sd);
            break;
        default:
            av_log(ctx, AV_LOG_WARNING, "unknown side data type %d (%d bytes)\n",
                   sd->type, sd->size);
            break;
        }

        av_log(ctx, AV_LOG_INFO, "\n");
    }

    dump_color_property(ctx, frame);

    return ff_filter_frame(inlink->dst->outputs[0], frame);
}

static int config_props(AVFilterContext *ctx, AVFilterLink *link, int is_out)
{
    av_log(ctx, AV_LOG_INFO, "config %s time_base: %d/%d, frame_rate: %d/%d\n",
           is_out ? "out" : "in",
           link->time_base.num, link->time_base.den,
           link->frame_rate.num, link->frame_rate.den);

    return 0;
}

static int config_props_in(AVFilterLink *link)
{
    AVFilterContext *ctx = link->dst;
    return config_props(ctx, link, 0);
}

static int config_props_out(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    return config_props(ctx, link, 1);
}

static const AVFilterPad avfilter_vf_showinfo_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = config_props_in,
    },
    { NULL }
};

static const AVFilterPad avfilter_vf_showinfo_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props_out,
    },
    { NULL }
};

AVFilter ff_vf_showinfo = {
    .name        = "showinfo",
    .description = NULL_IF_CONFIG_SMALL("Show textual information for each video frame."),
    .inputs      = avfilter_vf_showinfo_inputs,
    .outputs     = avfilter_vf_showinfo_outputs,
    .priv_size   = sizeof(ShowInfoContext),
    .priv_class  = &showinfo_class,
};