FFmpeg
vf_colorlevels.c
1 /*
2  * Copyright (c) 2013 Paul B Mahol
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include "libavutil/imgutils.h"
22 #include "libavutil/opt.h"
23 #include "libavutil/pixdesc.h"
24 #include "avfilter.h"
25 #include "drawutils.h"
26 #include "formats.h"
27 #include "internal.h"
28 #include "video.h"
29 #include "preserve_color.h"
30 
31 #define R 0
32 #define G 1
33 #define B 2
34 #define A 3
35 
36 typedef struct Range {
37  double in_min, in_max;
38  double out_min, out_max;
39 } Range;
40 
41 typedef struct ColorLevelsContext {
42  const AVClass *class;
43  Range range[4];
44  int preserve_color;
45 
46  int nb_comp;
47  int depth;
48  int max;
49  int planar;
50  int bpp;
51  int step;
52  uint8_t rgba_map[4];
53  int linesize;
54 
55  int (*colorlevels_slice[2])(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
56 } ColorLevelsContext;
57 
58 #define OFFSET(x) offsetof(ColorLevelsContext, x)
59 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
60 static const AVOption colorlevels_options[] = {
61  { "rimin", "set input red black point", OFFSET(range[R].in_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
62  { "gimin", "set input green black point", OFFSET(range[G].in_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
63  { "bimin", "set input blue black point", OFFSET(range[B].in_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
64  { "aimin", "set input alpha black point", OFFSET(range[A].in_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
65  { "rimax", "set input red white point", OFFSET(range[R].in_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -1, 1, FLAGS },
66  { "gimax", "set input green white point", OFFSET(range[G].in_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -1, 1, FLAGS },
67  { "bimax", "set input blue white point", OFFSET(range[B].in_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -1, 1, FLAGS },
68  { "aimax", "set input alpha white point", OFFSET(range[A].in_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -1, 1, FLAGS },
69  { "romin", "set output red black point", OFFSET(range[R].out_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, 0, 1, FLAGS },
70  { "gomin", "set output green black point", OFFSET(range[G].out_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, 0, 1, FLAGS },
71  { "bomin", "set output blue black point", OFFSET(range[B].out_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, 0, 1, FLAGS },
72  { "aomin", "set output alpha black point", OFFSET(range[A].out_min), AV_OPT_TYPE_DOUBLE, {.dbl=0}, 0, 1, FLAGS },
73  { "romax", "set output red white point", OFFSET(range[R].out_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
74  { "gomax", "set output green white point", OFFSET(range[G].out_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
75  { "bomax", "set output blue white point", OFFSET(range[B].out_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
76  { "aomax", "set output alpha white point", OFFSET(range[A].out_max), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
77  { "preserve", "set preserve color mode", OFFSET(preserve_color), AV_OPT_TYPE_INT, {.i64=0}, 0, NB_PRESERVE-1, FLAGS, "preserve" },
78  { "none", "disabled", 0, AV_OPT_TYPE_CONST, {.i64=P_NONE}, 0, 0, FLAGS, "preserve" },
79  { "lum", "luminance", 0, AV_OPT_TYPE_CONST, {.i64=P_LUM}, 0, 0, FLAGS, "preserve" },
80  { "max", "max", 0, AV_OPT_TYPE_CONST, {.i64=P_MAX}, 0, 0, FLAGS, "preserve" },
81  { "avg", "average", 0, AV_OPT_TYPE_CONST, {.i64=P_AVG}, 0, 0, FLAGS, "preserve" },
82  { "sum", "sum", 0, AV_OPT_TYPE_CONST, {.i64=P_SUM}, 0, 0, FLAGS, "preserve" },
83  { "nrm", "norm", 0, AV_OPT_TYPE_CONST, {.i64=P_NRM}, 0, 0, FLAGS, "preserve" },
84  { "pwr", "power", 0, AV_OPT_TYPE_CONST, {.i64=P_PWR}, 0, 0, FLAGS, "preserve" },
85  { NULL }
86 };
87 
88 AVFILTER_DEFINE_CLASS(colorlevels);
89 
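
The sixteen per-component options above are normalized levels: the input black and white points may be set to a negative value to request automatic detection from the frame content (see filter_frame below), while the output points stay within [0, 1]. As a minimal standalone sketch, not part of the filter and with the helper name map_level_u8 invented here, the mapping applied to each 8-bit sample amounts to:

#include <stdint.h>

/* Sketch of the per-component levels mapping for one 8-bit sample, with the
 * option values already scaled to 0..255 as filter_frame does below. */
static uint8_t map_level_u8(uint8_t v, int imin, int imax, int omin, int omax)
{
    double coeff = (omax - omin) / (double)(imax - imin);
    double o     = (v - imin) * coeff + omin;

    return o < 0.0 ? 0 : o > 255.0 ? 255 : (uint8_t)(o + 0.5);
}

In a filtergraph the same options appear as, for example, colorlevels=rimin=0.058:gimin=0.058:bimin=0.058, which raises the input black point of all three channels.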
90 typedef struct ThreadData {
91  const uint8_t *srcrow[4];
92  uint8_t *dstrow[4];
93  int dst_linesize;
94  int src_linesize;
95 
96  float coeff[4];
97 
98  int h;
99 
100  float fimin[4];
101  float fomin[4];
102  int imin[4];
103  int omin[4];
104 } ThreadData;
105 
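
ThreadData is the per-frame argument handed to every slice job: a gain (coeff) plus input and output minima per component, carried both as integers (8/16-bit paths) and as floats (32-bit float path). A rough sketch of the selection DO_COMMON performs below, with the helper name invented for illustration:

static float pick_input_min(int depth, float fimin, int imin)
{
    /* float parameters are used only for the 32-bit float formats */
    return depth == 32 ? fimin : (float)imin;
}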
106 #define DO_COMMON(type, ptype, clip, preserve, planar) \
107  const ThreadData *td = arg; \
108  const int linesize = s->linesize; \
109  const int step = s->step; \
110  const int process_h = td->h; \
111  const int slice_start = (process_h * jobnr ) / nb_jobs; \
112  const int slice_end = (process_h * (jobnr+1)) / nb_jobs; \
113  const int src_linesize = td->src_linesize / sizeof(type); \
114  const int dst_linesize = td->dst_linesize / sizeof(type); \
115  const type *src_r = (const type *)(td->srcrow[R]) + src_linesize * slice_start; \
116  const type *src_g = (const type *)(td->srcrow[G]) + src_linesize * slice_start; \
117  const type *src_b = (const type *)(td->srcrow[B]) + src_linesize * slice_start; \
118  const type *src_a = (const type *)(td->srcrow[A]) + src_linesize * slice_start; \
119  type *dst_r = (type *)(td->dstrow[R]) + src_linesize * slice_start; \
120  type *dst_g = (type *)(td->dstrow[G]) + src_linesize * slice_start; \
121  type *dst_b = (type *)(td->dstrow[B]) + src_linesize * slice_start; \
122  type *dst_a = (type *)(td->dstrow[A]) + src_linesize * slice_start; \
123  const ptype imin_r = s->depth == 32 ? td->fimin[R] : td->imin[R]; \
124  const ptype imin_g = s->depth == 32 ? td->fimin[G] : td->imin[G]; \
125  const ptype imin_b = s->depth == 32 ? td->fimin[B] : td->imin[B]; \
126  const ptype imin_a = s->depth == 32 ? td->fimin[A] : td->imin[A]; \
127  const ptype omin_r = s->depth == 32 ? td->fomin[R] : td->omin[R]; \
128  const ptype omin_g = s->depth == 32 ? td->fomin[G] : td->omin[G]; \
129  const ptype omin_b = s->depth == 32 ? td->fomin[B] : td->omin[B]; \
130  const ptype omin_a = s->depth == 32 ? td->fomin[A] : td->omin[A]; \
131  const float coeff_r = td->coeff[R]; \
132  const float coeff_g = td->coeff[G]; \
133  const float coeff_b = td->coeff[B]; \
134  const float coeff_a = td->coeff[A]; \
135  \
136  for (int y = slice_start; y < slice_end; y++) { \
137  for (int x = 0; x < linesize; x += step) { \
138  ptype ir, ig, ib, or, og, ob; \
139  ir = src_r[x]; \
140  ig = src_g[x]; \
141  ib = src_b[x]; \
142  if (preserve) { \
143  float ratio, icolor, ocolor, max = s->depth==32 ? 1.f : s->max; \
144  \
145  or = (ir - imin_r) * coeff_r + omin_r; \
146  og = (ig - imin_g) * coeff_g + omin_g; \
147  ob = (ib - imin_b) * coeff_b + omin_b; \
148  \
149  preserve_color(s->preserve_color, ir, ig, ib, or, og, ob, max, \
150  &icolor, &ocolor); \
151  if (ocolor > 0.f) { \
152  ratio = icolor / ocolor; \
153  \
154  or *= ratio; \
155  og *= ratio; \
156  ob *= ratio; \
157  } \
158  \
159  dst_r[x] = clip(or, depth); \
160  dst_g[x] = clip(og, depth); \
161  dst_b[x] = clip(ob, depth); \
162  } else { \
163  dst_r[x] = clip((ir - imin_r) * coeff_r + omin_r, depth); \
164  dst_g[x] = clip((ig - imin_g) * coeff_g + omin_g, depth); \
165  dst_b[x] = clip((ib - imin_b) * coeff_b + omin_b, depth); \
166  } \
167  } \
168  \
169  for (int x = 0; x < linesize && s->nb_comp == 4; x += step) \
170  dst_a[x] = clip((src_a[x] - imin_a) * coeff_a + omin_a, depth); \
171  \
172  src_r += src_linesize; \
173  src_g += src_linesize; \
174  src_b += src_linesize; \
175  src_a += src_linesize; \
176  \
177  dst_r += dst_linesize; \
178  dst_g += dst_linesize; \
179  dst_b += dst_linesize; \
180  dst_a += dst_linesize; \
181  }
182 
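
When a preserve mode is selected, the macro asks preserve_color() for the chosen quantity (luminance, max, average, sum, norm or power) of both the input pixel and the remapped one, then rescales the remapped RGB so that quantity is carried over. A standalone sketch of that rescale step, with an invented helper name:

static void preserve_rescale(float icolor, float ocolor,
                             float *r, float *g, float *b)
{
    if (ocolor > 0.f) {
        float ratio = icolor / ocolor;

        *r *= ratio;
        *g *= ratio;
        *b *= ratio;
    }
}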
183 #define CLIP8(x, depth) av_clip_uint8(x)
184 #define CLIP16(x, depth) av_clip_uint16(x)
185 #define NOCLIP(x, depth) (x)
186 
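
CLIP8 and CLIP16 ignore their depth argument and clamp to the full 8- or 16-bit range, NOCLIP passes float samples through unchanged, and the 9- to 14-bit planar paths pass av_clip_uintp2 so the depth local in each wrapper is honoured. Roughly, that depth-parameterized clamp behaves like this sketch (helper name invented):

static int clip_uintp2_sketch(int x, int depth)
{
    int max = (1 << depth) - 1;

    return x < 0 ? 0 : x > max ? max : x;
}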
187 static int colorlevels_slice_8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
188 {
189  ColorLevelsContext *s = ctx->priv;
190  DO_COMMON(uint8_t, int, CLIP8, 0, 0)
191  return 0;
192 }
193 
194 static int colorlevels_slice_16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
195 {
196  ColorLevelsContext *s = ctx->priv;
197  DO_COMMON(uint16_t, int, CLIP16, 0, 0)
198  return 0;
199 }
200 
201 static int colorlevels_preserve_slice_8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
202 {
203  ColorLevelsContext *s = ctx->priv;
204  DO_COMMON(uint8_t, int, CLIP8, 1, 0)
205  return 0;
206 }
207 
208 static int colorlevels_preserve_slice_16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
209 {
210  ColorLevelsContext *s = ctx->priv;
211  DO_COMMON(uint16_t, int, CLIP16, 1, 0)
212  return 0;
213 }
214 
215 static int colorlevels_slice_8_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
216 {
217  ColorLevelsContext *s = ctx->priv;
218  DO_COMMON(uint8_t, int, CLIP8, 0, 1)
219  return 0;
220 }
221 
222 static int colorlevels_slice_9_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
223 {
224  ColorLevelsContext *s = ctx->priv;
225  const int depth = 9;
226  DO_COMMON(uint16_t, int, av_clip_uintp2, 0, 1)
227  return 0;
228 }
229 
230 static int colorlevels_slice_10_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
231 {
232  ColorLevelsContext *s = ctx->priv;
233  const int depth = 10;
234  DO_COMMON(uint16_t, int, av_clip_uintp2, 0, 1)
235  return 0;
236 }
237 
238 static int colorlevels_slice_12_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
239 {
240  ColorLevelsContext *s = ctx->priv;
241  const int depth = 12;
242  DO_COMMON(uint16_t, int, av_clip_uintp2, 0, 1)
243  return 0;
244 }
245 
246 static int colorlevels_slice_14_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
247 {
248  ColorLevelsContext *s = ctx->priv;
249  const int depth = 14;
250  DO_COMMON(uint16_t, int, av_clip_uintp2, 0, 1)
251  return 0;
252 }
253 
254 static int colorlevels_slice_16_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
255 {
256  ColorLevelsContext *s = ctx->priv;
257  DO_COMMON(uint16_t, int, CLIP16, 0, 1)
258  return 0;
259 }
260 
261 static int colorlevels_slice_32_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
262 {
263  ColorLevelsContext *s = ctx->priv;
264  DO_COMMON(float, float, NOCLIP, 0, 1)
265  return 0;
266 }
267 
268 static int colorlevels_preserve_slice_8_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
269 {
270  ColorLevelsContext *s = ctx->priv;
271  DO_COMMON(uint8_t, int, CLIP8, 1, 1)
272  return 0;
273 }
274 
275 static int colorlevels_preserve_slice_9_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
276 {
277  ColorLevelsContext *s = ctx->priv;
278  const int depth = 9;
279  DO_COMMON(uint16_t, int, av_clip_uintp2, 1, 1)
280  return 0;
281 }
282 
283 static int colorlevels_preserve_slice_10_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
284 {
285  ColorLevelsContext *s = ctx->priv;
286  const int depth = 10;
287  DO_COMMON(uint16_t, int, av_clip_uintp2, 1, 1)
288  return 0;
289 }
290 
291 static int colorlevels_preserve_slice_12_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
292 {
293  ColorLevelsContext *s = ctx->priv;
294  const int depth = 12;
295  DO_COMMON(uint16_t, int, av_clip_uintp2, 1, 1)
296  return 0;
297 }
298 
299 static int colorlevels_preserve_slice_14_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
300 {
301  ColorLevelsContext *s = ctx->priv;
302  const int depth = 14;
303  DO_COMMON(uint16_t, int, av_clip_uintp2, 1, 1)
304  return 0;
305 }
306 
307 static int colorlevels_preserve_slice_16_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
308 {
309  ColorLevelsContext *s = ctx->priv;
310  DO_COMMON(uint16_t, int, CLIP16, 1, 1)
311  return 0;
312 }
313 
314 static int colorlevels_preserve_slice_32_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
315 {
316  ColorLevelsContext *s = ctx->priv;
317  DO_COMMON(float, float, NOCLIP, 1, 1)
318  return 0;
319 }
320 
321 static int config_input(AVFilterLink *inlink)
322 {
323  AVFilterContext *ctx = inlink->dst;
324  ColorLevelsContext *s = ctx->priv;
325  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
326 
327  s->nb_comp = desc->nb_components;
328  s->planar = desc->flags & AV_PIX_FMT_FLAG_PLANAR;
329  s->depth = desc->comp[0].depth;
330  s->max = (1 << s->depth) - 1;
331  s->bpp = (desc->comp[0].depth + 7) >> 3;
332  s->step = s->planar ? 1 : av_get_padded_bits_per_pixel(desc) >> (3 + (s->bpp == 2));
333  s->linesize = inlink->w * s->step;
334  ff_fill_rgba_map(s->rgba_map, inlink->format);
335 
336  if (!s->planar) {
337  s->colorlevels_slice[0] = colorlevels_slice_8;
338  s->colorlevels_slice[1] = colorlevels_preserve_slice_8;
339  if (s->bpp == 2) {
340  s->colorlevels_slice[0] = colorlevels_slice_16;
341  s->colorlevels_slice[1] = colorlevels_preserve_slice_16;
342  }
343  } else {
344  switch (s->depth) {
345  case 8:
346  s->colorlevels_slice[0] = colorlevels_slice_8_planar;
347  s->colorlevels_slice[1] = colorlevels_preserve_slice_8_planar;
348  break;
349  case 9:
350  s->colorlevels_slice[0] = colorlevels_slice_9_planar;
351  s->colorlevels_slice[1] = colorlevels_preserve_slice_9_planar;
352  break;
353  case 10:
354  s->colorlevels_slice[0] = colorlevels_slice_10_planar;
355  s->colorlevels_slice[1] = colorlevels_preserve_slice_10_planar;
356  break;
357  case 12:
358  s->colorlevels_slice[0] = colorlevels_slice_12_planar;
359  s->colorlevels_slice[1] = colorlevels_preserve_slice_12_planar;
360  break;
361  case 14:
362  s->colorlevels_slice[0] = colorlevels_slice_14_planar;
363  s->colorlevels_slice[1] = colorlevels_preserve_slice_14_planar;
364  break;
365  case 16:
366  s->colorlevels_slice[0] = colorlevels_slice_16_planar;
367  s->colorlevels_slice[1] = colorlevels_preserve_slice_16_planar;
368  break;
369  case 32:
370  s->colorlevels_slice[0] = colorlevels_slice_32_planar;
371  s->colorlevels_slice[1] = colorlevels_preserve_slice_32_planar;
372  break;
373  }
374  }
375 
376  return 0;
377 }
378 
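For packed formats, config_input above derives step, the number of component-sized samples per pixel, from the padded bits per pixel, and linesize then counts samples across the active width. A sketch of that computation (helper name invented):

/* e.g. 4 for RGBA (32 bpp, 1 byte per component), 3 for RGB48 (48 bpp,
 * 2 bytes per component) */
static int packed_step(int padded_bits_per_pixel, int bytes_per_comp)
{
    return padded_bits_per_pixel >> (3 + (bytes_per_comp == 2));
}
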
379 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
380 {
381  AVFilterContext *ctx = inlink->dst;
382  ColorLevelsContext *s = ctx->priv;
383  AVFilterLink *outlink = ctx->outputs[0];
384  const int step = s->step;
385  ThreadData td;
386  AVFrame *out;
387 
388  if (av_frame_is_writable(in)) {
389  out = in;
390  } else {
391  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
392  if (!out) {
393  av_frame_free(&in);
394  return AVERROR(ENOMEM);
395  }
396  av_frame_copy_props(out, in);
397  }
398 
399  td.h = inlink->h;
400  td.dst_linesize = out->linesize[0];
401  td.src_linesize = in->linesize[0];
402  if (s->planar) {
403  td.srcrow[R] = in->data[2];
404  td.dstrow[R] = out->data[2];
405  td.srcrow[G] = in->data[0];
406  td.dstrow[G] = out->data[0];
407  td.srcrow[B] = in->data[1];
408  td.dstrow[B] = out->data[1];
409  td.srcrow[A] = in->data[3];
410  td.dstrow[A] = out->data[3];
411  } else {
412  td.srcrow[R] = in->data[0] + s->rgba_map[R] * s->bpp;
413  td.dstrow[R] = out->data[0] + s->rgba_map[R] * s->bpp;
414  td.srcrow[G] = in->data[0] + s->rgba_map[G] * s->bpp;
415  td.dstrow[G] = out->data[0] + s->rgba_map[G] * s->bpp;
416  td.srcrow[B] = in->data[0] + s->rgba_map[B] * s->bpp;
417  td.dstrow[B] = out->data[0] + s->rgba_map[B] * s->bpp;
418  td.srcrow[A] = in->data[0] + s->rgba_map[A] * s->bpp;
419  td.dstrow[A] = out->data[0] + s->rgba_map[A] * s->bpp;
420  }
421 
422  switch (s->bpp) {
423  case 1:
424  for (int i = 0; i < s->nb_comp; i++) {
425  Range *r = &s->range[i];
426  const uint8_t offset = s->rgba_map[i];
427  const uint8_t *srcrow = in->data[0];
428  int imin = lrint(r->in_min * UINT8_MAX);
429  int imax = lrint(r->in_max * UINT8_MAX);
430  int omin = lrint(r->out_min * UINT8_MAX);
431  int omax = lrint(r->out_max * UINT8_MAX);
432  float coeff;
433 
434  if (imin < 0) {
435  imin = UINT8_MAX;
436  for (int y = 0; y < inlink->h; y++) {
437  const uint8_t *src = srcrow;
438 
439  for (int x = 0; x < s->linesize; x += step)
440  imin = FFMIN(imin, src[x + offset]);
441  srcrow += in->linesize[0];
442  }
443  }
444  if (imax < 0) {
445  srcrow = in->data[0];
446  imax = 0;
447  for (int y = 0; y < inlink->h; y++) {
448  const uint8_t *src = srcrow;
449 
450  for (int x = 0; x < s->linesize; x += step)
451  imax = FFMAX(imax, src[x + offset]);
452  srcrow += in->linesize[0];
453  }
454  }
455 
456  coeff = (omax - omin) / (double)(imax - imin);
457 
458  td.coeff[i] = coeff;
459  td.imin[i] = imin;
460  td.omin[i] = omin;
461  }
462  break;
463  case 2:
464  for (int i = 0; i < s->nb_comp; i++) {
465  Range *r = &s->range[i];
466  const uint8_t offset = s->rgba_map[i];
467  const uint8_t *srcrow = in->data[0];
468  int imin = lrint(r->in_min * UINT16_MAX);
469  int imax = lrint(r->in_max * UINT16_MAX);
470  int omin = lrint(r->out_min * UINT16_MAX);
471  int omax = lrint(r->out_max * UINT16_MAX);
472  float coeff;
473 
474  if (imin < 0) {
475  imin = UINT16_MAX;
476  for (int y = 0; y < inlink->h; y++) {
477  const uint16_t *src = (const uint16_t *)srcrow;
478 
479  for (int x = 0; x < s->linesize; x += step)
480  imin = FFMIN(imin, src[x + offset]);
481  srcrow += in->linesize[0];
482  }
483  }
484  if (imax < 0) {
485  srcrow = in->data[0];
486  imax = 0;
487  for (int y = 0; y < inlink->h; y++) {
488  const uint16_t *src = (const uint16_t *)srcrow;
489 
490  for (int x = 0; x < s->linesize; x += step)
491  imax = FFMAX(imax, src[x + offset]);
492  srcrow += in->linesize[0];
493  }
494  }
495 
496  coeff = (omax - omin) / (double)(imax - imin);
497 
498  td.coeff[i] = coeff;
499  td.imin[i] = imin;
500  td.omin[i] = omin;
501  }
502  break;
503  case 4:
504  for (int i = 0; i < s->nb_comp; i++) {
505  Range *r = &s->range[i];
506  const uint8_t offset = s->rgba_map[i];
507  const uint8_t *srcrow = in->data[0];
508  float imin = r->in_min;
509  float imax = r->in_max;
510  float omin = r->out_min;
511  float omax = r->out_max;
512  float coeff;
513 
514  if (imin < 0.f) {
515  imin = 1.f;
516  for (int y = 0; y < inlink->h; y++) {
517  const float *src = (const float *)srcrow;
518 
519  for (int x = 0; x < s->linesize; x += step)
520  imin = fminf(imin, src[x + offset]);
521  srcrow += in->linesize[0];
522  }
523  }
524  if (imax < 0.f) {
525  srcrow = in->data[0];
526  imax = 0.f;
527  for (int y = 0; y < inlink->h; y++) {
528  const float *src = (const float *)srcrow;
529 
530  for (int x = 0; x < s->linesize; x += step)
531  imax = fmaxf(imax, src[x + offset]);
532  srcrow += in->linesize[0];
533  }
534  }
535 
536  coeff = (omax - omin) / (double)(imax - imin);
537 
538  td.coeff[i] = coeff;
539  td.fimin[i] = imin;
540  td.fomin[i] = omin;
541  }
542  break;
543  }
544 
545  ff_filter_execute(ctx, s->colorlevels_slice[s->preserve_color > 0], &td, NULL,
546  FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
547 
548  if (in != out)
549  av_frame_free(&in);
550  return ff_filter_frame(outlink, out);
551 }
552 
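
filter_frame scales the option values to the sample range of the format and, whenever an input bound is negative, scans the frame for the actual per-channel minimum or maximum before computing the gain. A standalone 8-bit sketch of that scan (names invented for illustration):

#include <stdint.h>

static void auto_levels_u8(const uint8_t *data, int linesize, int width_samples,
                           int h, int step, int offset, int *imin, int *imax)
{
    *imin = 255;
    *imax = 0;

    for (int y = 0; y < h; y++) {
        const uint8_t *src = data + y * linesize;

        for (int x = 0; x < width_samples; x += step) {
            int v = src[x + offset];

            if (v < *imin) *imin = v;
            if (v > *imax) *imax = v;
        }
    }
}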
553 static const AVFilterPad colorlevels_inputs[] = {
554  {
555  .name = "default",
556  .type = AVMEDIA_TYPE_VIDEO,
557  .filter_frame = filter_frame,
558  .config_props = config_input,
559  },
560 };
561 
562 static const AVFilterPad colorlevels_outputs[] = {
563  {
564  .name = "default",
565  .type = AVMEDIA_TYPE_VIDEO,
566  },
567 };
568 
569 const AVFilter ff_vf_colorlevels = {
570  .name = "colorlevels",
571  .description = NULL_IF_CONFIG_SMALL("Adjust the color levels."),
572  .priv_size = sizeof(ColorLevelsContext),
573  .priv_class = &colorlevels_class,
574  FILTER_INPUTS(colorlevels_inputs),
575  FILTER_OUTPUTS(colorlevels_outputs),
576  FILTER_PIXFMTS(AV_PIX_FMT_0RGB, AV_PIX_FMT_0BGR,
577  AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR,
578  AV_PIX_FMT_RGB0, AV_PIX_FMT_BGR0,
579  AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
580  AV_PIX_FMT_RGB48, AV_PIX_FMT_BGR48,
581  AV_PIX_FMT_RGBA, AV_PIX_FMT_BGRA,
582  AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
583  AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP,
584  AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
585  AV_PIX_FMT_GBRAP10, AV_PIX_FMT_GBRP12,
586  AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRP14,
587  AV_PIX_FMT_GBRP16, AV_PIX_FMT_GBRAP16,
588  AV_PIX_FMT_GBRPF32, AV_PIX_FMT_GBRAPF32),
589  .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |
590  AVFILTER_FLAG_SLICE_THREADS,
591  .process_command = ff_filter_process_command,
592 };
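
Because every option carries AV_OPT_FLAG_RUNTIME_PARAM and the filter installs ff_filter_process_command, the levels can be adjusted while a graph is running. A sketch using the public libavfilter API; the instance name "colorlevels" and the value 0.06 are assumptions:

#include <libavfilter/avfilter.h>

/* Change the red input black point of a running instance assumed to be
 * named "colorlevels" in an already-configured graph. */
static int set_rimin_at_runtime(AVFilterGraph *graph)
{
    char res[64];

    return avfilter_graph_send_command(graph, "colorlevels", "rimin", "0.06",
                                       res, sizeof(res), 0);
}

From the ffmpeg command line the same effect can be obtained through the sendcmd filter.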