FFmpeg
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
vf_colorchannelmixer.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2013 Paul B Mahol
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include "libavutil/opt.h"
22 #include "libavutil/pixdesc.h"
23 #include "avfilter.h"
24 #include "drawutils.h"
25 #include "formats.h"
26 #include "internal.h"
27 #include "video.h"
28 
29 #define R 0
30 #define G 1
31 #define B 2
32 #define A 3
33 
/* Per-frame payload handed to each slice-threading job via execute(). */
34 typedef struct ThreadData {
35  AVFrame *in, *out; /* source frame and destination frame (may alias when in-place) */
36 } ThreadData;
37 
/*
 * Filter private context: one gain per (output channel, input channel) pair,
 * expanded at config time into integer lookup tables.
 * NOTE(review): this listing elides some members (the rgba_map array and the
 * closing "} ColorChannelMixerContext;" line) — confirm against the full file.
 */
38 typedef struct ColorChannelMixerContext {
39  const AVClass *class;
    /* Mixing gains, range [-2, 2]; first letter = output channel, second = input. */
40  double rr, rg, rb, ra;
41  double gr, gg, gb, ga;
42  double br, bg, bb, ba;
43  double ar, ag, ab, aa;
44 
    /* lut[out][in][v] = lrint(v * gain); all 16 tables live inside 'buffer'. */
45  int *lut[4][4];
46 
    /* Single allocation backing all 16 LUTs (16 * (1 << depth) ints). */
47  int *buffer;
48 
50 
    /* Per-pixel-format slice worker selected in config_output(). */
51  int (*filter_slice)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
53 
54 #define OFFSET(x) offsetof(ColorChannelMixerContext, x)
55 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
/* AVOption table: 4x4 gain matrix, each entry in [-2, 2], identity by default.
 * NOTE(review): the opening "static const AVOption colorchannelmixer_options[] = {"
 * line is elided from this listing. */
57  { "rr", "set the red gain for the red channel", OFFSET(rr), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -2, 2, FLAGS },
58  { "rg", "set the green gain for the red channel", OFFSET(rg), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
59  { "rb", "set the blue gain for the red channel", OFFSET(rb), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
60  { "ra", "set the alpha gain for the red channel", OFFSET(ra), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
61  { "gr", "set the red gain for the green channel", OFFSET(gr), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
62  { "gg", "set the green gain for the green channel", OFFSET(gg), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -2, 2, FLAGS },
63  { "gb", "set the blue gain for the green channel", OFFSET(gb), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
64  { "ga", "set the alpha gain for the green channel", OFFSET(ga), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
65  { "br", "set the red gain for the blue channel", OFFSET(br), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
66  { "bg", "set the green gain for the blue channel", OFFSET(bg), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
67  { "bb", "set the blue gain for the blue channel", OFFSET(bb), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -2, 2, FLAGS },
68  { "ba", "set the alpha gain for the blue channel", OFFSET(ba), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
69  { "ar", "set the red gain for the alpha channel", OFFSET(ar), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
70  { "ag", "set the green gain for the alpha channel", OFFSET(ag), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
71  { "ab", "set the blue gain for the alpha channel", OFFSET(ab), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -2, 2, FLAGS },
72  { "aa", "set the alpha gain for the alpha channel", OFFSET(aa), AV_OPT_TYPE_DOUBLE, {.dbl=1}, -2, 2, FLAGS },
73  { NULL }
74 };
75 
76 AVFILTER_DEFINE_CLASS(colorchannelmixer);
77 
/* Advertise the supported packed/planar RGB(A) pixel formats on all links.
 * NOTE(review): the function signature and the pix_fmts[] entries are elided
 * from this listing — confirm the format list against the full file. */
79 {
80  static const enum AVPixelFormat pix_fmts[] = {
95  };
96 
97  AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
98  if (!fmts_list)
99  return AVERROR(ENOMEM);
100  return ff_set_common_formats(ctx, fmts_list);
101 }
102 
/*
 * Mix one horizontal slice of a planar 8-bit GBR(A) frame.
 * Each output channel is the clipped sum of per-input-channel LUT values.
 * have_alpha is a compile-time constant (av_always_inline) so the alpha
 * branches fold away in the specialized wrappers.
 * NOTE(review): the "ColorChannelMixerContext *s = ctx->priv;" line is elided
 * from this listing; 's' below refers to the filter's private context.
 * NOTE(review): 'ain = srca[j]' is read even when have_alpha == 0, where
 * data[3] is presumably NULL — relies on constant propagation removing the
 * dead load; verify against the full source.
 */
103 static av_always_inline int filter_slice_rgba_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs,
104  int have_alpha)
105 {
107  ThreadData *td = arg;
108  AVFrame *in = td->in;
109  AVFrame *out = td->out;
    /* Rows [slice_start, slice_end) belong to this job. */
110  const int slice_start = (out->height * jobnr) / nb_jobs;
111  const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
    /* Planar GBR(A): plane 0 = G, 1 = B, 2 = R, 3 = A. */
112  const uint8_t *srcg = in->data[0] + slice_start * in->linesize[0];
113  const uint8_t *srcb = in->data[1] + slice_start * in->linesize[1];
114  const uint8_t *srcr = in->data[2] + slice_start * in->linesize[2];
115  const uint8_t *srca = in->data[3] + slice_start * in->linesize[3];
116  uint8_t *dstg = out->data[0] + slice_start * out->linesize[0];
117  uint8_t *dstb = out->data[1] + slice_start * out->linesize[1];
118  uint8_t *dstr = out->data[2] + slice_start * out->linesize[2];
119  uint8_t *dsta = out->data[3] + slice_start * out->linesize[3];
120  int i, j;
121 
122  for (i = slice_start; i < slice_end; i++) {
123  for (j = 0; j < out->width; j++) {
124  const uint8_t rin = srcr[j];
125  const uint8_t gin = srcg[j];
126  const uint8_t bin = srcb[j];
127  const uint8_t ain = srca[j];
128 
129  dstr[j] = av_clip_uint8(s->lut[R][R][rin] +
130  s->lut[R][G][gin] +
131  s->lut[R][B][bin] +
132  (have_alpha == 1 ? s->lut[R][A][ain] : 0));
133  dstg[j] = av_clip_uint8(s->lut[G][R][rin] +
134  s->lut[G][G][gin] +
135  s->lut[G][B][bin] +
136  (have_alpha == 1 ? s->lut[G][A][ain] : 0));
137  dstb[j] = av_clip_uint8(s->lut[B][R][rin] +
138  s->lut[B][G][gin] +
139  s->lut[B][B][bin] +
140  (have_alpha == 1 ? s->lut[B][A][ain] : 0));
141  if (have_alpha == 1) {
142  dsta[j] = av_clip_uint8(s->lut[A][R][rin] +
143  s->lut[A][G][gin] +
144  s->lut[A][B][bin] +
145  s->lut[A][A][ain]);
146  }
147  }
148 
    /* Advance all plane pointers to the next row. */
149  srcg += in->linesize[0];
150  srcb += in->linesize[1];
151  srcr += in->linesize[2];
152  srca += in->linesize[3];
153  dstg += out->linesize[0];
154  dstb += out->linesize[1];
155  dstr += out->linesize[2];
156  dsta += out->linesize[3];
157  }
158 
159  return 0;
160 }
161 
/*
 * Mix one slice of a planar high-depth (9..16 bit, 16-bit container) GBR(A)
 * frame. Same LUT scheme as the 8-bit version, but results are clipped to
 * 'depth' bits with av_clip_uintp2. have_alpha and depth are compile-time
 * constants in the specialized wrappers.
 * NOTE(review): the "ColorChannelMixerContext *s = ctx->priv;" line is elided
 * from this listing.
 */
162 static av_always_inline int filter_slice_rgba16_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs,
163  int have_alpha, int depth)
164 {
166  ThreadData *td = arg;
167  AVFrame *in = td->in;
168  AVFrame *out = td->out;
169  const int slice_start = (out->height * jobnr) / nb_jobs;
170  const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
    /* linesize is in bytes; samples are uint16_t, hence the casts here and
     * the "/ 2" row strides below. */
171  const uint16_t *srcg = (const uint16_t *)(in->data[0] + slice_start * in->linesize[0]);
172  const uint16_t *srcb = (const uint16_t *)(in->data[1] + slice_start * in->linesize[1]);
173  const uint16_t *srcr = (const uint16_t *)(in->data[2] + slice_start * in->linesize[2]);
174  const uint16_t *srca = (const uint16_t *)(in->data[3] + slice_start * in->linesize[3]);
175  uint16_t *dstg = (uint16_t *)(out->data[0] + slice_start * out->linesize[0]);
176  uint16_t *dstb = (uint16_t *)(out->data[1] + slice_start * out->linesize[1]);
177  uint16_t *dstr = (uint16_t *)(out->data[2] + slice_start * out->linesize[2]);
178  uint16_t *dsta = (uint16_t *)(out->data[3] + slice_start * out->linesize[3]);
179  int i, j;
180 
181  for (i = slice_start; i < slice_end; i++) {
182  for (j = 0; j < out->width; j++) {
183  const uint16_t rin = srcr[j];
184  const uint16_t gin = srcg[j];
185  const uint16_t bin = srcb[j];
186  const uint16_t ain = srca[j];
187 
188  dstr[j] = av_clip_uintp2(s->lut[R][R][rin] +
189  s->lut[R][G][gin] +
190  s->lut[R][B][bin] +
191  (have_alpha == 1 ? s->lut[R][A][ain] : 0), depth);
192  dstg[j] = av_clip_uintp2(s->lut[G][R][rin] +
193  s->lut[G][G][gin] +
194  s->lut[G][B][bin] +
195  (have_alpha == 1 ? s->lut[G][A][ain] : 0), depth);
196  dstb[j] = av_clip_uintp2(s->lut[B][R][rin] +
197  s->lut[B][G][gin] +
198  s->lut[B][B][bin] +
199  (have_alpha == 1 ? s->lut[B][A][ain] : 0), depth);
200  if (have_alpha == 1) {
201  dsta[j] = av_clip_uintp2(s->lut[A][R][rin] +
202  s->lut[A][G][gin] +
203  s->lut[A][B][bin] +
204  s->lut[A][A][ain], depth);
205  }
206  }
207 
208  srcg += in->linesize[0] / 2;
209  srcb += in->linesize[1] / 2;
210  srcr += in->linesize[2] / 2;
211  srca += in->linesize[3] / 2;
212  dstg += out->linesize[0] / 2;
213  dstb += out->linesize[1] / 2;
214  dstr += out->linesize[2] / 2;
215  dsta += out->linesize[3] / 2;
216  }
217 
218  return 0;
219 }
220 
221 static int filter_slice_gbrp(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
222 {
223  return filter_slice_rgba_planar(ctx, arg, jobnr, nb_jobs, 0);
224 }
225 
226 static int filter_slice_gbrap(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
227 {
228  return filter_slice_rgba_planar(ctx, arg, jobnr, nb_jobs, 1);
229 }
230 
231 static int filter_slice_gbrp9(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
232 {
233  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 0, 9);
234 }
235 
236 static int filter_slice_gbrp10(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
237 {
238  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 0, 10);
239 }
240 
241 static int filter_slice_gbrap10(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
242 {
243  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 1, 10);
244 }
245 
246 static int filter_slice_gbrp12(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
247 {
248  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 0, 12);
249 }
250 
251 static int filter_slice_gbrap12(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
252 {
253  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 1, 12);
254 }
255 
256 static int filter_slice_gbrp14(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
257 {
258  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 0, 14);
259 }
260 
261 static int filter_slice_gbrp16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
262 {
263  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 0, 16);
264 }
265 
266 static int filter_slice_gbrap16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
267 {
268  return filter_slice_rgba16_planar(ctx, arg, jobnr, nb_jobs, 1, 16);
269 }
270 
/*
 * Mix one slice of a packed 8-bit RGB(A) frame. 'step' is the bytes per
 * pixel (3 or 4); rgba_map gives the byte offset of each channel within a
 * pixel for the concrete format. have_alpha: 1 = real alpha, 0 = no alpha
 * byte, -1 = format has a padding byte at the alpha position (e.g. RGB0)
 * which is zeroed when writing to a separate output frame.
 * NOTE(review): the "ColorChannelMixerContext *s = ctx->priv;" line is
 * elided from this listing.
 */
271 static av_always_inline int filter_slice_rgba_packed(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs,
272  int have_alpha, int step)
273 {
275  ThreadData *td = arg;
276  AVFrame *in = td->in;
277  AVFrame *out = td->out;
278  const int slice_start = (out->height * jobnr) / nb_jobs;
279  const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
    /* Byte offsets of R/G/B/A inside each packed pixel. */
280  const uint8_t roffset = s->rgba_map[R];
281  const uint8_t goffset = s->rgba_map[G];
282  const uint8_t boffset = s->rgba_map[B];
283  const uint8_t aoffset = s->rgba_map[A];
284  const uint8_t *srcrow = in->data[0] + slice_start * in->linesize[0];
285  uint8_t *dstrow = out->data[0] + slice_start * out->linesize[0];
286  int i, j;
287 
288  for (i = slice_start; i < slice_end; i++) {
289  const uint8_t *src = srcrow;
290  uint8_t *dst = dstrow;
291 
    /* j walks in byte units, one pixel (step bytes) at a time. */
292  for (j = 0; j < out->width * step; j += step) {
293  const uint8_t rin = src[j + roffset];
294  const uint8_t gin = src[j + goffset];
295  const uint8_t bin = src[j + boffset];
296  const uint8_t ain = src[j + aoffset];
297 
298  dst[j + roffset] = av_clip_uint8(s->lut[R][R][rin] +
299  s->lut[R][G][gin] +
300  s->lut[R][B][bin] +
301  (have_alpha == 1 ? s->lut[R][A][ain] : 0));
302  dst[j + goffset] = av_clip_uint8(s->lut[G][R][rin] +
303  s->lut[G][G][gin] +
304  s->lut[G][B][bin] +
305  (have_alpha == 1 ? s->lut[G][A][ain] : 0));
306  dst[j + boffset] = av_clip_uint8(s->lut[B][R][rin] +
307  s->lut[B][G][gin] +
308  s->lut[B][B][bin] +
309  (have_alpha == 1 ? s->lut[B][A][ain] : 0));
310  if (have_alpha == 1) {
311  dst[j + aoffset] = av_clip_uint8(s->lut[A][R][rin] +
312  s->lut[A][G][gin] +
313  s->lut[A][B][bin] +
314  s->lut[A][A][ain]);
    /* Padding-byte formats: clear the unused byte, but only when writing
     * to a distinct output buffer (in-place already holds the value). */
315  } else if (have_alpha == -1 && in != out)
316  dst[j + aoffset] = 0;
317  }
318 
319  srcrow += in->linesize[0];
320  dstrow += out->linesize[0];
321  }
322 
323  return 0;
324 }
325 
/*
 * Mix one slice of a packed 16-bit-per-component RGB(A) frame (RGB48 /
 * RGBA64 and BGR variants). 'step' counts uint16_t components per pixel
 * (3 or 4). Results are clipped to the full 16-bit range.
 * NOTE(review): the "ColorChannelMixerContext *s = ctx->priv;" line is
 * elided from this listing.
 */
326 static av_always_inline int filter_slice_rgba16_packed(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs,
327  int have_alpha, int step)
328 {
330  ThreadData *td = arg;
331  AVFrame *in = td->in;
332  AVFrame *out = td->out;
333  const int slice_start = (out->height * jobnr) / nb_jobs;
334  const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
    /* Component offsets (in uint16_t units) within each packed pixel. */
335  const uint8_t roffset = s->rgba_map[R];
336  const uint8_t goffset = s->rgba_map[G];
337  const uint8_t boffset = s->rgba_map[B];
338  const uint8_t aoffset = s->rgba_map[A];
339  const uint8_t *srcrow = in->data[0] + slice_start * in->linesize[0];
340  uint8_t *dstrow = out->data[0] + slice_start * out->linesize[0];
341  int i, j;
342 
343  for (i = slice_start; i < slice_end; i++) {
    /* Rows advance in bytes (linesize); samples are accessed as uint16_t. */
344  const uint16_t *src = (const uint16_t *)srcrow;
345  uint16_t *dst = (uint16_t *)dstrow;
346 
347  for (j = 0; j < out->width * step; j += step) {
348  const uint16_t rin = src[j + roffset];
349  const uint16_t gin = src[j + goffset];
350  const uint16_t bin = src[j + boffset];
351  const uint16_t ain = src[j + aoffset];
352 
353  dst[j + roffset] = av_clip_uint16(s->lut[R][R][rin] +
354  s->lut[R][G][gin] +
355  s->lut[R][B][bin] +
356  (have_alpha == 1 ? s->lut[R][A][ain] : 0));
357  dst[j + goffset] = av_clip_uint16(s->lut[G][R][rin] +
358  s->lut[G][G][gin] +
359  s->lut[G][B][bin] +
360  (have_alpha == 1 ? s->lut[G][A][ain] : 0));
361  dst[j + boffset] = av_clip_uint16(s->lut[B][R][rin] +
362  s->lut[B][G][gin] +
363  s->lut[B][B][bin] +
364  (have_alpha == 1 ? s->lut[B][A][ain] : 0));
365  if (have_alpha == 1) {
366  dst[j + aoffset] = av_clip_uint16(s->lut[A][R][rin] +
367  s->lut[A][G][gin] +
368  s->lut[A][B][bin] +
369  s->lut[A][A][ain]);
370  }
371  }
372 
373  srcrow += in->linesize[0];
374  dstrow += out->linesize[0];
375  }
376 
377  return 0;
378 }
379 
380 static int filter_slice_rgba64(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
381 {
382  return filter_slice_rgba16_packed(ctx, arg, jobnr, nb_jobs, 1, 4);
383 }
384 
385 static int filter_slice_rgb48(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
386 {
387  return filter_slice_rgba16_packed(ctx, arg, jobnr, nb_jobs, 0, 3);
388 }
389 
390 static int filter_slice_rgba(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
391 {
392  return filter_slice_rgba_packed(ctx, arg, jobnr, nb_jobs, 1, 4);
393 }
394 
395 static int filter_slice_rgb24(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
396 {
397  return filter_slice_rgba_packed(ctx, arg, jobnr, nb_jobs, 0, 3);
398 }
399 
400 static int filter_slice_rgb0(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
401 {
402  return filter_slice_rgba_packed(ctx, arg, jobnr, nb_jobs, -1, 4);
403 }
404 
/*
 * Output-link configuration: build the 16 gain LUTs for the negotiated
 * depth and select the per-format slice worker.
 * Returns 0 on success, AVERROR(ENOMEM) if the LUT buffer allocation fails.
 * NOTE(review): this listing elides the pixel-format descriptor lookup and
 * every "s->filter_slice = filter_slice_*;" assignment inside the switch —
 * confirm the per-format worker selection against the full file.
 */
405 static int config_output(AVFilterLink *outlink)
406 {
407  AVFilterContext *ctx = outlink->src;
410  const int depth = desc->comp[0].depth;
411  int i, j, size, *buffer;
412 
413  ff_fill_rgba_map(s->rgba_map, outlink->format);
414 
    /* One LUT entry per possible sample value; 16 tables in one allocation. */
415  size = 1 << depth;
416  s->buffer = buffer = av_malloc(16 * size * sizeof(*s->buffer));
417  if (!s->buffer)
418  return AVERROR(ENOMEM);
419 
    /* Carve the single buffer into the 4x4 table of LUT pointers. */
420  for (i = 0; i < 4; i++)
421  for (j = 0; j < 4; j++, buffer += size)
422  s->lut[i][j] = buffer;
423 
    /* Precompute lut[out][in][v] = round(v * gain) for every sample value. */
424  for (i = 0; i < size; i++) {
425  s->lut[R][R][i] = lrint(i * s->rr);
426  s->lut[R][G][i] = lrint(i * s->rg);
427  s->lut[R][B][i] = lrint(i * s->rb);
428  s->lut[R][A][i] = lrint(i * s->ra);
429 
430  s->lut[G][R][i] = lrint(i * s->gr);
431  s->lut[G][G][i] = lrint(i * s->gg);
432  s->lut[G][B][i] = lrint(i * s->gb);
433  s->lut[G][A][i] = lrint(i * s->ga);
434 
435  s->lut[B][R][i] = lrint(i * s->br);
436  s->lut[B][G][i] = lrint(i * s->bg);
437  s->lut[B][B][i] = lrint(i * s->bb);
438  s->lut[B][A][i] = lrint(i * s->ba);
439 
440  s->lut[A][R][i] = lrint(i * s->ar);
441  s->lut[A][G][i] = lrint(i * s->ag);
442  s->lut[A][B][i] = lrint(i * s->ab);
443  s->lut[A][A][i] = lrint(i * s->aa);
444  }
445 
    /* Pick the slice worker matching the negotiated pixel format
     * (assignments elided in this listing). */
446  switch (outlink->format) {
447  case AV_PIX_FMT_BGR24:
448  case AV_PIX_FMT_RGB24:
450  break;
451  case AV_PIX_FMT_0BGR:
452  case AV_PIX_FMT_0RGB:
453  case AV_PIX_FMT_BGR0:
454  case AV_PIX_FMT_RGB0:
456  break;
457  case AV_PIX_FMT_ABGR:
458  case AV_PIX_FMT_ARGB:
459  case AV_PIX_FMT_BGRA:
460  case AV_PIX_FMT_RGBA:
462  break;
463  case AV_PIX_FMT_BGR48:
464  case AV_PIX_FMT_RGB48:
466  break;
467  case AV_PIX_FMT_BGRA64:
468  case AV_PIX_FMT_RGBA64:
470  break;
471  case AV_PIX_FMT_GBRP:
473  break;
474  case AV_PIX_FMT_GBRAP:
476  break;
477  case AV_PIX_FMT_GBRP9:
479  break;
480  case AV_PIX_FMT_GBRP10:
482  break;
483  case AV_PIX_FMT_GBRAP10:
485  break;
486  case AV_PIX_FMT_GBRP12:
488  break;
489  case AV_PIX_FMT_GBRAP12:
491  break;
492  case AV_PIX_FMT_GBRP14:
494  break;
495  case AV_PIX_FMT_GBRP16:
497  break;
498  case AV_PIX_FMT_GBRAP16:
500  break;
501  }
502 
503  return 0;
504 }
505 
/*
 * Per-frame entry point: process in place when the input is writable,
 * otherwise allocate an output frame, then run the selected slice worker
 * across the frame via the slice-threading executor.
 * Takes ownership of 'in'; passes the result downstream with ff_filter_frame.
 * NOTE(review): the "ColorChannelMixerContext *s = ctx->priv;" line is
 * elided from this listing.
 */
506 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
507 {
508  AVFilterContext *ctx = inlink->dst;
510  AVFilterLink *outlink = ctx->outputs[0];
511  ThreadData td;
512  AVFrame *out;
513 
514  if (av_frame_is_writable(in)) {
    /* Writable input: mix in place, no extra buffer needed. */
515  out = in;
516  } else {
517  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
518  if (!out) {
519  av_frame_free(&in);
520  return AVERROR(ENOMEM);
521  }
522  av_frame_copy_props(out, in);
523  }
524 
525  td.in = in;
526  td.out = out;
    /* No more jobs than rows, so every job gets at least one row. */
527  ctx->internal->execute(ctx, s->filter_slice, &td, NULL, FFMIN(outlink->h, ff_filter_get_nb_threads(ctx)));
528 
529  if (in != out)
530  av_frame_free(&in);
531  return ff_filter_frame(outlink, out);
532 }
533 
/* Filter teardown: release the LUT backing buffer.
 * NOTE(review): the function signature and the private-context declaration
 * are elided from this listing. */
535 {
537 
538  av_freep(&s->buffer);
539 }
540 
/* Single video input pad; frames arrive through filter_frame().
 * NOTE(review): the array's opening declaration line is elided from this
 * listing. */
542  {
543  .name = "default",
544  .type = AVMEDIA_TYPE_VIDEO,
545  .filter_frame = filter_frame,
546  },
547  { NULL }
548 };
549 
/* Single video output pad; config_output() builds the LUTs when the link
 * is configured.
 * NOTE(review): the array's opening declaration line is elided from this
 * listing. */
551  {
552  .name = "default",
553  .type = AVMEDIA_TYPE_VIDEO,
554  .config_props = config_output,
555  },
556  { NULL }
557 };
558 
/* Public filter definition registered with libavfilter.
 * NOTE(review): the opening "AVFilter ff_vf_colorchannelmixer = {" line and
 * the .query_formats / .flags members are elided from this listing. */
560  .name = "colorchannelmixer",
561  .description = NULL_IF_CONFIG_SMALL("Adjust colors by mixing color channels."),
562  .priv_size = sizeof(ColorChannelMixerContext),
563  .priv_class = &colorchannelmixer_class,
564  .uninit = uninit,
566  .inputs = colorchannelmixer_inputs,
567  .outputs = colorchannelmixer_outputs,
569 };
#define NULL
Definition: coverity.c:32
AVFILTER_DEFINE_CLASS(colorchannelmixer)
AVFrame * out
Definition: af_adeclick.c:485
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2446
This structure describes decoded (raw) audio or video data.
Definition: frame.h:226
static int query_formats(AVFilterContext *ctx)
AVOption.
Definition: opt.h:246
#define AV_PIX_FMT_GBRAP10
Definition: pixfmt.h:399
Main libavfilter public API header.
packed RGB 8:8:8, 24bpp, RGBRGB...
Definition: pixfmt.h:68
const char * desc
Definition: nvenc.c:65
#define AV_PIX_FMT_RGBA64
Definition: pixfmt.h:369
planar GBR 4:4:4 24bpp
Definition: pixfmt.h:168
static int filter_slice_rgba(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
#define AV_PIX_FMT_GBRP10
Definition: pixfmt.h:395
#define AV_PIX_FMT_BGRA64
Definition: pixfmt.h:374
static int filter_slice_gbrap10(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined
Definition: pixfmt.h:239
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
static av_always_inline int filter_slice_rgba16_packed(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs, int have_alpha, int step)
#define src
Definition: vp8dsp.c:254
#define OFFSET(x)
static int filter_slice_rgb0(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
int(* filter_slice)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:283
#define AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
Some filters support a generic "enable" expression option that can be used to enable or disable a fil...
Definition: avfilter.h:125
static int filter_slice_gbrp12(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
const char * name
Pad name.
Definition: internal.h:60
static int filter_slice_gbrp9(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1080
AVFrame * in
Definition: af_afftdn.c:1082
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
Definition: pixdesc.h:117
uint8_t
#define av_cold
Definition: attributes.h:82
#define av_malloc(s)
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:238
#define A
AVOptions.
static int filter_slice_gbrap12(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
static const AVFilterPad colorchannelmixer_inputs[]
#define AV_PIX_FMT_GBRP9
Definition: pixfmt.h:394
packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
Definition: pixfmt.h:94
static int filter_slice_gbrp14(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
#define AV_PIX_FMT_BGR48
Definition: pixfmt.h:370
ptrdiff_t size
Definition: opengl_enc.c:101
static int filter_slice_gbrap(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
#define G
A filter pad used for either input or output.
Definition: internal.h:54
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
static av_always_inline int filter_slice_rgba16_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs, int have_alpha, int depth)
int width
Definition: frame.h:284
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:568
#define td
Definition: regdef.h:70
static const AVOption colorchannelmixer_options[]
#define AVERROR(e)
Definition: error.h:43
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:202
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:186
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:95
static int filter_slice_gbrp16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
void * priv
private data for use by the filter
Definition: avfilter.h:353
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:116
static int filter_slice_gbrp(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
const char * arg
Definition: jacosubdec.c:66
#define AV_PIX_FMT_GBRAP12
Definition: pixfmt.h:400
#define AV_PIX_FMT_RGB48
Definition: pixfmt.h:365
static int filter_slice_rgb48(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
Definition: pixfmt.h:92
#define AV_PIX_FMT_GBRAP16
Definition: pixfmt.h:401
static av_cold void uninit(AVFilterContext *ctx)
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:93
#define B
static const AVFilterPad colorchannelmixer_outputs[]
#define AV_PIX_FMT_GBRP16
Definition: pixfmt.h:398
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:802
static int filter_slice_gbrp10(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
#define FFMIN(a, b)
Definition: common.h:96
AVFormatContext * ctx
Definition: movenc.c:48
static int config_output(AVFilterLink *outlink)
#define ra
Definition: regdef.h:57
#define s(width, name)
Definition: cbs_vp9.c:257
packed RGB 8:8:8, 24bpp, BGRBGR...
Definition: pixfmt.h:69
static const AVFilterPad inputs[]
Definition: af_acontrast.c:193
#define AV_PIX_FMT_GBRP14
Definition: pixfmt.h:397
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
int ff_fill_rgba_map(uint8_t *rgba_map, enum AVPixelFormat pix_fmt)
Definition: drawutils.c:35
static int filter_slice_rgb24(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
misc drawing utilities
#define FLAGS
int av_frame_is_writable(AVFrame *frame)
Check if the frame data is writable.
Definition: frame.c:594
Used for passing data between threads.
Definition: af_adeclick.c:484
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:257
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(constuint8_t *) pi-0x80)*(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(constuint8_t *) pi-0x80)*(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(constint16_t *) pi >>8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t,*(constint16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t,*(constint16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(constint32_t *) pi >>24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t,*(constint32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t,*(constint32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(constfloat *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(constfloat *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(constfloat *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(constdouble *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(constdouble *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(constdouble *) pi *(1U<< 31))))#defineSET_CONV_FUNC_GROUP(ofmt, ifmt) staticvoidset_generic_function(AudioConvert *ac){}voidff_audio_convert_free(AudioConvert **ac){if(!*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);}AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enumAVSampleFormatout_fmt, enumAVSampleFormatin_fmt, intchannels, intsample_rate, 
intapply_map){AudioConvert *ac;intin_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) returnNULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method!=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt)>2){ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc){av_free(ac);returnNULL;}returnac;}in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar){ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar?ac->channels:1;}elseif(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;elseac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);returnac;}intff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in){intuse_generic=1;intlen=in->nb_samples;intp;if(ac->dc){av_log(ac->avr, AV_LOG_TRACE,"%dsamples-audio_convert:%sto%s(dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));returnff_convert_dither(ac-> in
Describe the class of an AVClass context structure.
Definition: log.h:67
Filter definition.
Definition: avfilter.h:144
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:240
const char * name
Filter name.
Definition: avfilter.h:148
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:350
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:266
#define AV_PIX_FMT_GBRP12
Definition: pixfmt.h:396
#define flags(name, subs,...)
Definition: cbs_av1.c:596
AVFilterInternal * internal
An opaque struct for libavfilter internal use.
Definition: avfilter.h:378
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:240
#define R
static int filter_slice_gbrap16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
AVFilter ff_vf_colorchannelmixer
int
planar GBRA 4:4:4:4 32bpp
Definition: pixfmt.h:215
avfilter_execute_func * execute
Definition: internal.h:155
static int slice_end(AVCodecContext *avctx, AVFrame *pict)
Handle slice ends.
Definition: mpeg12dec.c:2029
A list of supported formats for one end of a filter link.
Definition: formats.h:64
#define lrint
Definition: tablegen.h:53
An instance of a filter.
Definition: avfilter.h:338
static av_always_inline int filter_slice_rgba_planar(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs, int have_alpha)
int height
Definition: frame.h:284
FILE * out
Definition: movenc.c:54
#define av_freep(p)
#define av_always_inline
Definition: attributes.h:39
static av_always_inline int filter_slice_rgba_packed(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs, int have_alpha, int step)
internal API functions
int depth
Number of bits in the component.
Definition: pixdesc.h:58
packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined
Definition: pixfmt.h:237
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
static int filter_slice_rgba64(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
for(j=16;j >0;--j)
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:654
GLuint buffer
Definition: opengl_enc.c:102