FFmpeg
vf_paletteuse.c
1 /*
2  * Copyright (c) 2015 Stupeflix
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * Use a palette to downsample an input video stream.
24  */
25 
26 #include "libavutil/bprint.h"
27 #include "libavutil/opt.h"
28 #include "libavutil/qsort.h"
29 #include "dualinput.h"
30 #include "avfilter.h"
31 
32 enum dithering_mode {
33  DITHERING_NONE,
34  DITHERING_BAYER,
35  DITHERING_HECKBERT,
36  DITHERING_FLOYD_STEINBERG,
37  DITHERING_SIERRA2,
38  DITHERING_SIERRA2_4A,
39  NB_DITHERING
40 };
41 
42 enum color_search_method {
43  COLOR_SEARCH_NNS_ITERATIVE,
44  COLOR_SEARCH_NNS_RECURSIVE,
45  COLOR_SEARCH_BRUTEFORCE,
46  NB_COLOR_SEARCHES
47 };
48 
49 enum diff_mode {
50  DIFF_MODE_NONE,
51  DIFF_MODE_RECTANGLE,
52  NB_DIFF_MODE
53 };
54 
55 struct color_node {
56  uint8_t val[3];
57  uint8_t palette_id;
58  int split;
59  int left_id, right_id;
60 };
61 
62 #define NBITS 5
63 #define CACHE_SIZE (1<<(3*NBITS))
64 
65 struct cached_color {
66  uint32_t color;
67  uint8_t pal_entry;
68 };
69 
70 struct cache_node {
71  struct cached_color *entries;
72  int nb_entries;
73 };
74 
75 struct PaletteUseContext;
76 
77 typedef int (*set_frame_func)(struct PaletteUseContext *s, AVFrame *out, AVFrame *in,
78  int x_start, int y_start, int width, int height);
79 
80 typedef struct PaletteUseContext {
81  const AVClass *class;
82  FFDualInputContext dinput;
83  struct cache_node cache[CACHE_SIZE]; /* lookup cache */
84  struct color_node map[AVPALETTE_COUNT]; /* 3D-Tree (KD-Tree with K=3) for reverse colormap */
85  uint32_t palette[AVPALETTE_COUNT];
86  int palette_loaded;
87  int dither;
88  set_frame_func set_frame;
89  int bayer_scale;
90  int ordered_dither[8*8];
91  int diff_mode;
92  AVFrame *last_in;
93  AVFrame *last_out;
94 
95  /* debug options */
96  char *dot_filename;
97  int color_search_method;
98  int calc_mean_err;
99  uint64_t total_mean_err;
100  int debug_accuracy;
101 } PaletteUseContext;
102 
103 #define OFFSET(x) offsetof(PaletteUseContext, x)
104 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
105 static const AVOption paletteuse_options[] = {
106  { "dither", "select dithering mode", OFFSET(dither), AV_OPT_TYPE_INT, {.i64=DITHERING_SIERRA2_4A}, 0, NB_DITHERING-1, FLAGS, "dithering_mode" },
107  { "bayer", "ordered 8x8 bayer dithering (deterministic)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_BAYER}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
108  { "heckbert", "dithering as defined by Paul Heckbert in 1982 (simple error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_HECKBERT}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
109  { "floyd_steinberg", "Floyd and Steinberg dithering (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_FLOYD_STEINBERG}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
110  { "sierra2", "Frankie Sierra dithering v2 (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
111  { "sierra2_4a", "Frankie Sierra dithering v2 \"Lite\" (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2_4A}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
112  { "bayer_scale", "set scale for bayer dithering", OFFSET(bayer_scale), AV_OPT_TYPE_INT, {.i64=2}, 0, 5, FLAGS },
113  { "diff_mode", "set frame difference mode", OFFSET(diff_mode), AV_OPT_TYPE_INT, {.i64=DIFF_MODE_NONE}, 0, NB_DIFF_MODE-1, FLAGS, "diff_mode" },
114  { "rectangle", "process smallest different rectangle", 0, AV_OPT_TYPE_CONST, {.i64=DIFF_MODE_RECTANGLE}, INT_MIN, INT_MAX, FLAGS, "diff_mode" },
115 
116  /* following are the debug options, not part of the official API */
117  { "debug_kdtree", "save Graphviz graph of the kdtree in specified file", OFFSET(dot_filename), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
118  { "color_search", "set reverse colormap color search method", OFFSET(color_search_method), AV_OPT_TYPE_INT, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, 0, NB_COLOR_SEARCHES-1, FLAGS, "search" },
119  { "nns_iterative", "iterative search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
120  { "nns_recursive", "recursive search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_RECURSIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
121  { "bruteforce", "brute-force into the palette", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_BRUTEFORCE}, INT_MIN, INT_MAX, FLAGS, "search" },
122  { "mean_err", "compute and print mean error", OFFSET(calc_mean_err), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, FLAGS },
123  { "debug_accuracy", "test color search accuracy", OFFSET(debug_accuracy), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, 1, FLAGS },
124  { NULL }
125 };
126 
127 AVFILTER_DEFINE_CLASS(paletteuse);
128 
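/* Annotation, not part of vf_paletteuse.c: the options above are what gets tuned
 * from the command line. paletteuse takes two inputs, the video and a 256-color
 * palette (typically produced by the palettegen filter), e.g. the usual
 * two-pass GIF workflow:
 *
 *   ffmpeg -i input.mkv -vf palettegen palette.png
 *   ffmpeg -i input.mkv -i palette.png -lavfi paletteuse output.gif
 *
 * Options are appended in the usual filter syntax, e.g.
 * "paletteuse=dither=bayer:bayer_scale=3:diff_mode=rectangle". */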
129 static int query_formats(AVFilterContext *ctx)
130 {
131  static const enum AVPixelFormat in_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
132  static const enum AVPixelFormat inpal_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
133  static const enum AVPixelFormat out_fmts[] = {AV_PIX_FMT_PAL8, AV_PIX_FMT_NONE};
134  AVFilterFormats *in    = ff_make_format_list(in_fmts);
135  AVFilterFormats *inpal = ff_make_format_list(inpal_fmts);
136  AVFilterFormats *out   = ff_make_format_list(out_fmts);
137  if (!in || !inpal || !out) {
138  av_freep(&in);
139  av_freep(&inpal);
140  av_freep(&out);
141  return AVERROR(ENOMEM);
142  }
143  ff_formats_ref(in, &ctx->inputs[0]->out_formats);
144  ff_formats_ref(inpal, &ctx->inputs[1]->out_formats);
145  ff_formats_ref(out, &ctx->outputs[0]->in_formats);
146  return 0;
147 }
148 
149 static av_always_inline int dither_color(uint32_t px, int er, int eg, int eb, int scale, int shift)
150 {
151  return av_clip_uint8((px >> 16 & 0xff) + ((er * scale) / (1<<shift))) << 16
152  | av_clip_uint8((px >> 8 & 0xff) + ((eg * scale) / (1<<shift))) << 8
153  | av_clip_uint8((px & 0xff) + ((eb * scale) / (1<<shift)));
154 }
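/* Annotation, not part of vf_paletteuse.c: dither_color() adds the quantization
 * error (er, eg, eb) to one neighbouring pixel, weighted by scale / 2^shift per
 * channel, and repacks the clipped result. The call sites in set_frame() below
 * encode the classic kernel weights this way, e.g. Floyd-Steinberg passes
 * (scale=7, shift=4) for the right neighbour, i.e. 7/16 of the error. */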
155 
156 static av_always_inline int diff(const uint8_t *c1, const uint8_t *c2)
157 {
158  // XXX: try L*a*b with CIE76 (dL*dL + da*da + db*db)
159  const int dr = c1[0] - c2[0];
160  const int dg = c1[1] - c2[1];
161  const int db = c1[2] - c2[2];
162  return dr*dr + dg*dg + db*db;
163 }
164 
165 static av_always_inline uint8_t colormap_nearest_bruteforce(const uint32_t *palette, const uint8_t *rgb)
166 {
167  int i, pal_id = -1, min_dist = INT_MAX;
168 
169  for (i = 0; i < AVPALETTE_COUNT; i++) {
170  const uint32_t c = palette[i];
171 
172  if ((c & 0xff000000) == 0xff000000) { // ignore transparent entry
173  const uint8_t palrgb[] = {
174  palette[i]>>16 & 0xff,
175  palette[i]>> 8 & 0xff,
176  palette[i] & 0xff,
177  };
178  const int d = diff(palrgb, rgb);
179  if (d < min_dist) {
180  pal_id = i;
181  min_dist = d;
182  }
183  }
184  }
185  return pal_id;
186 }
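/* Annotation, not part of vf_paletteuse.c: the brute-force search scans all 256
 * palette entries, skipping those whose alpha byte is not 0xff (load_colormap()
 * below likewise keeps transparent and duplicate entries out of the tree). It is
 * selectable with color_search=bruteforce and also serves as the reference
 * answer in debug_accuracy(). */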
187 
188 /* Recursive form, simpler but a bit slower. Kept for reference. */
189 struct nearest_color {
190  int node_pos;
191  int dist_sqd;
192 };
193 
194 static void colormap_nearest_node(const struct color_node *map,
195  const int node_pos,
196  const uint8_t *target,
197  struct nearest_color *nearest)
198 {
199  const struct color_node *kd = map + node_pos;
200  const int s = kd->split;
201  int dx, nearer_kd_id, further_kd_id;
202  const uint8_t *current = kd->val;
203  const int current_to_target = diff(target, current);
204 
205  if (current_to_target < nearest->dist_sqd) {
206  nearest->node_pos = node_pos;
207  nearest->dist_sqd = current_to_target;
208  }
209 
210  if (kd->left_id != -1 || kd->right_id != -1) {
211  dx = target[s] - current[s];
212 
213  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
214  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
215 
216  if (nearer_kd_id != -1)
217  colormap_nearest_node(map, nearer_kd_id, target, nearest);
218 
219  if (further_kd_id != -1 && dx*dx < nearest->dist_sqd)
220  colormap_nearest_node(map, further_kd_id, target, nearest);
221  }
222 }
223 
224 static av_always_inline uint8_t colormap_nearest_recursive(const struct color_node *node, const uint8_t *rgb)
225 {
226  struct nearest_color res = {.dist_sqd = INT_MAX, .node_pos = -1};
227  colormap_nearest_node(node, 0, rgb, &res);
228  return node[res.node_pos].palette_id;
229 }
230 
231 struct stack_node {
232  int color_id;
233  int dx2;
234 };
235 
236 static av_always_inline uint8_t colormap_nearest_iterative(const struct color_node *root, const uint8_t *target)
237 {
238  int pos = 0, best_node_id = -1, best_dist = INT_MAX, cur_color_id = 0;
239  struct stack_node nodes[16];
240  struct stack_node *node = &nodes[0];
241 
242  for (;;) {
243 
244  const struct color_node *kd = &root[cur_color_id];
245  const uint8_t *current = kd->val;
246  const int current_to_target = diff(target, current);
247 
248  /* Compare current color node to the target and update our best node if
249  * it's actually better. */
250  if (current_to_target < best_dist) {
251  best_node_id = cur_color_id;
252  if (!current_to_target)
253  goto end; // exact match, we can return immediately
254  best_dist = current_to_target;
255  }
256 
257  /* Check if it's not a leaf */
258  if (kd->left_id != -1 || kd->right_id != -1) {
259  const int split = kd->split;
260  const int dx = target[split] - current[split];
261  int nearer_kd_id, further_kd_id;
262 
263  /* Define which side is the most interesting. */
264  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
265  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
266 
267  if (nearer_kd_id != -1) {
268  if (further_kd_id != -1) {
269  /* Here, both paths are defined, so we push a state for
270  * when we are going back. */
271  node->color_id = further_kd_id;
272  node->dx2 = dx*dx;
273  pos++;
274  node++;
275  }
276  /* We can now update current color with the most probable path
277  * (no need to create a state since there is nothing to save
278  * anymore). */
279  cur_color_id = nearer_kd_id;
280  continue;
281  } else if (dx*dx < best_dist) {
282  /* The nearest path isn't available, so there is only one path
283  * possible and it's the least probable. We enter it only if the
284  * distance from the current point to the hyper rectangle is
285  * less than our best distance. */
286  cur_color_id = further_kd_id;
287  continue;
288  }
289  }
290 
291  /* Unstack as much as we can, typically as long as the least probable
292  * branch isn't actually probable. */
293  do {
294  if (--pos < 0)
295  goto end;
296  node--;
297  } while (node->dx2 >= best_dist);
298 
299  /* We got a node where the least probable branch might actually contain
300  * a relevant color. */
301  cur_color_id = node->color_id;
302  }
303 
304 end:
305  return root[best_node_id].palette_id;
306 }
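/* Annotation, not part of vf_paletteuse.c: this is the same nearest-neighbour
 * walk as colormap_nearest_node() above, but with an explicit stack instead of
 * recursion. The 16 stack_node slots are comfortably enough: colormap_insert()
 * below splits each box at the median entry of its longest axis, so the tree
 * over at most 256 palette entries stays roughly balanced (depth around
 * log2(256) = 8), and only nodes with two children ever push a state. */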
307 
308 #define COLORMAP_NEAREST(search, palette, root, target) \
309  search == COLOR_SEARCH_NNS_ITERATIVE ? colormap_nearest_iterative(root, target) : \
310  search == COLOR_SEARCH_NNS_RECURSIVE ? colormap_nearest_recursive(root, target) : \
311  colormap_nearest_bruteforce(palette, target)
312 
313 /**
314  * Check if the requested color is in the cache already. If not, find it in the
315  * color tree and cache it.
316  * Note: r, g, and b are the components of c but are passed as well to avoid
317  * recomputing them (they are generally computed by the caller for other uses).
318  */
319 static av_always_inline int color_get(struct cache_node *cache, uint32_t color,
320  uint8_t r, uint8_t g, uint8_t b,
321  const struct color_node *map,
322  const uint32_t *palette,
323  const enum color_search_method search_method)
324 {
325  int i;
326  const uint8_t rgb[] = {r, g, b};
327  const uint8_t rhash = r & ((1<<NBITS)-1);
328  const uint8_t ghash = g & ((1<<NBITS)-1);
329  const uint8_t bhash = b & ((1<<NBITS)-1);
330  const unsigned hash = rhash<<(NBITS*2) | ghash<<NBITS | bhash;
331  struct cache_node *node = &cache[hash];
332  struct cached_color *e;
333 
334  for (i = 0; i < node->nb_entries; i++) {
335  e = &node->entries[i];
336  if (e->color == color)
337  return e->pal_entry;
338  }
339 
340  e = av_dynarray2_add((void**)&node->entries, &node->nb_entries,
341  sizeof(*node->entries), NULL);
342  if (!e)
343  return AVERROR(ENOMEM);
344  e->color = color;
345  e->pal_entry = COLORMAP_NEAREST(search_method, palette, map, rgb);
346  return e->pal_entry;
347 }
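/* Annotation, not part of vf_paletteuse.c: colors are cached in 2^(3*NBITS) =
 * 32768 buckets keyed by the NBITS low bits of each channel; collisions within a
 * bucket are chained through the av_dynarray2_add()-grown entries array and
 * resolved by comparing the full color value. A minimal standalone sketch of the
 * bucket computation (the example_* names are ours, not part of the filter):
 *
 *   #include <stdio.h>
 *   #include <stdint.h>
 *
 *   #define EX_NBITS 5   // mirrors NBITS above
 *
 *   static unsigned example_cache_hash(uint32_t c)
 *   {
 *       const uint8_t r = c >> 16 & 0xff, g = c >> 8 & 0xff, b = c & 0xff;
 *       const uint8_t rh = r & ((1 << EX_NBITS) - 1);   // low 5 bits per channel
 *       const uint8_t gh = g & ((1 << EX_NBITS) - 1);
 *       const uint8_t bh = b & ((1 << EX_NBITS) - 1);
 *       return rh << (EX_NBITS * 2) | gh << EX_NBITS | bh;
 *   }
 *
 *   int main(void)
 *   {
 *       // 0xAABBCC -> rh=0x0A gh=0x1B bh=0x0C -> bucket 11116 of 32768
 *       printf("bucket=%u\n", example_cache_hash(0xffaabbcc));
 *       return 0;
 *   }
 */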
348 
349 static av_always_inline int get_dst_color_err(struct cache_node *cache,
350  uint32_t c, const struct color_node *map,
351  const uint32_t *palette,
352  int *er, int *eg, int *eb,
353  const enum color_search_method search_method)
354 {
355  const uint8_t r = c >> 16 & 0xff;
356  const uint8_t g = c >> 8 & 0xff;
357  const uint8_t b = c & 0xff;
358  const int dstx = color_get(cache, c, r, g, b, map, palette, search_method);
359  const uint32_t dstc = palette[dstx];
360  *er = r - (dstc >> 16 & 0xff);
361  *eg = g - (dstc >> 8 & 0xff);
362  *eb = b - (dstc & 0xff);
363  return dstx;
364 }
365 
366 static av_always_inline int set_frame(PaletteUseContext *s, AVFrame *out, AVFrame *in,
367  int x_start, int y_start, int w, int h,
368  enum dithering_mode dither,
369  const enum color_search_method search_method)
370 {
371  int x, y;
372  const struct color_node *map = s->map;
373  struct cache_node *cache = s->cache;
374  const uint32_t *palette = s->palette;
375  const int src_linesize = in ->linesize[0] >> 2;
376  const int dst_linesize = out->linesize[0];
377  uint32_t *src = ((uint32_t *)in ->data[0]) + y_start*src_linesize;
378  uint8_t *dst = out->data[0] + y_start*dst_linesize;
379 
380  w += x_start;
381  h += y_start;
382 
383  for (y = y_start; y < h; y++) {
384  for (x = x_start; x < w; x++) {
385  int er, eg, eb;
386 
387  if (dither == DITHERING_BAYER) {
388  const int d = s->ordered_dither[(y & 7)<<3 | (x & 7)];
389  const uint8_t r8 = src[x] >> 16 & 0xff;
390  const uint8_t g8 = src[x] >> 8 & 0xff;
391  const uint8_t b8 = src[x] & 0xff;
392  const uint8_t r = av_clip_uint8(r8 + d);
393  const uint8_t g = av_clip_uint8(g8 + d);
394  const uint8_t b = av_clip_uint8(b8 + d);
395  const uint32_t c = r<<16 | g<<8 | b;
396  const int color = color_get(cache, c, r, g, b, map, palette, search_method);
397 
398  if (color < 0)
399  return color;
400  dst[x] = color;
401 
402  } else if (dither == DITHERING_HECKBERT) {
403  const int right = x < w - 1, down = y < h - 1;
404  const int color = get_dst_color_err(cache, src[x], map, palette, &er, &eg, &eb, search_method);
405 
406  if (color < 0)
407  return color;
408  dst[x] = color;
409 
410  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 3, 3);
411  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 3, 3);
412  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 2, 3);
413 
414  } else if (dither == DITHERING_FLOYD_STEINBERG) {
415  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
416  const int color = get_dst_color_err(cache, src[x], map, palette, &er, &eg, &eb, search_method);
417 
418  if (color < 0)
419  return color;
420  dst[x] = color;
421 
422  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 7, 4);
423  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 3, 4);
424  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 5, 4);
425  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 1, 4);
426 
427  } else if (dither == DITHERING_SIERRA2) {
428  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
429  const int right2 = x < w - 2, left2 = x > x_start + 1;
430  const int color = get_dst_color_err(cache, src[x], map, palette, &er, &eg, &eb, search_method);
431 
432  if (color < 0)
433  return color;
434  dst[x] = color;
435 
436  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 4, 4);
437  if (right2) src[ x + 2] = dither_color(src[ x + 2], er, eg, eb, 3, 4);
438 
439  if (down) {
440  if (left2) src[ src_linesize + x - 2] = dither_color(src[ src_linesize + x - 2], er, eg, eb, 1, 4);
441  if (left) src[ src_linesize + x - 1] = dither_color(src[ src_linesize + x - 1], er, eg, eb, 2, 4);
442  src[ src_linesize + x ] = dither_color(src[ src_linesize + x ], er, eg, eb, 3, 4);
443  if (right) src[ src_linesize + x + 1] = dither_color(src[ src_linesize + x + 1], er, eg, eb, 2, 4);
444  if (right2) src[ src_linesize + x + 2] = dither_color(src[ src_linesize + x + 2], er, eg, eb, 1, 4);
445  }
446 
447  } else if (dither == DITHERING_SIERRA2_4A) {
448  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
449  const int color = get_dst_color_err(cache, src[x], map, palette, &er, &eg, &eb, search_method);
450 
451  if (color < 0)
452  return color;
453  dst[x] = color;
454 
455  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 2, 2);
456  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 1, 2);
457  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 1, 2);
458 
459  } else {
460  const uint8_t r = src[x] >> 16 & 0xff;
461  const uint8_t g = src[x] >> 8 & 0xff;
462  const uint8_t b = src[x] & 0xff;
463  const int color = color_get(cache, src[x] & 0xffffff, r, g, b, map, palette, search_method);
464 
465  if (color < 0)
466  return color;
467  dst[x] = color;
468  }
469  }
470  src += src_linesize;
471  dst += dst_linesize;
472  }
473  return 0;
474 }
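/* Annotation, not part of vf_paletteuse.c: the (scale, shift) pairs above encode
 * the usual error-diffusion kernels, with '*' marking the current pixel and all
 * weights divided by 2^shift:
 *
 *   heckbert (/8):            *  3      floyd_steinberg (/16):       *  7
 *                          3  2                                   3  5  1
 *
 *   sierra2 (/16):            *  4  3   sierra2_4a "Lite" (/4):      *  2
 *                    1  2  3  2  1                                1  1
 *
 * The bayer branch instead perturbs each channel with the precomputed
 * ordered_dither[] offset for (x & 7, y & 7) before the palette lookup, and the
 * final else branch is plain nearest-color mapping with no dithering. */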
475 
476 #define INDENT 4
477 static void disp_node(AVBPrint *buf,
478  const struct color_node *map,
479  int parent_id, int node_id,
480  int depth)
481 {
482  const struct color_node *node = &map[node_id];
483  const uint32_t fontcolor = node->val[0] > 0x50 &&
484  node->val[1] > 0x50 &&
485  node->val[2] > 0x50 ? 0 : 0xffffff;
486  av_bprintf(buf, "%*cnode%d ["
487  "label=\"%c%02X%c%02X%c%02X%c\" "
488  "fillcolor=\"#%02x%02x%02x\" "
489  "fontcolor=\"#%06X\"]\n",
490  depth*INDENT, ' ', node->palette_id,
491  "[ "[node->split], node->val[0],
492  "][ "[node->split], node->val[1],
493  " ]["[node->split], node->val[2],
494  " ]"[node->split],
495  node->val[0], node->val[1], node->val[2],
496  fontcolor);
497  if (parent_id != -1)
498  av_bprintf(buf, "%*cnode%d -> node%d\n", depth*INDENT, ' ',
499  map[parent_id].palette_id, node->palette_id);
500  if (node->left_id != -1) disp_node(buf, map, node_id, node->left_id, depth + 1);
501  if (node->right_id != -1) disp_node(buf, map, node_id, node->right_id, depth + 1);
502 }
503 
504 // debug_kdtree=kdtree.dot -> dot -Tpng kdtree.dot > kdtree.png
505 static int disp_tree(const struct color_node *node, const char *fname)
506 {
507  AVBPrint buf;
508  FILE *f = av_fopen_utf8(fname, "w");
509 
510  if (!f) {
511  int ret = AVERROR(errno);
512  av_log(NULL, AV_LOG_ERROR, "Cannot open file '%s' for writing: %s\n",
513  fname, av_err2str(ret));
514  return ret;
515  }
516 
517  av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED);
518 
519  av_bprintf(&buf, "digraph {\n");
520  av_bprintf(&buf, " node [style=filled fontsize=10 shape=box]\n");
521  disp_node(&buf, node, -1, 0, 0);
522  av_bprintf(&buf, "}\n");
523 
524  fwrite(buf.str, 1, buf.len, f);
525  fclose(f);
526  av_bprint_finalize(&buf, NULL);
527  return 0;
528 }
529 
530 static int debug_accuracy(const struct color_node *node, const uint32_t *palette,
531  const enum color_search_method search_method)
532 {
533  int r, g, b, ret = 0;
534 
535  for (r = 0; r < 256; r++) {
536  for (g = 0; g < 256; g++) {
537  for (b = 0; b < 256; b++) {
538  const uint8_t rgb[] = {r, g, b};
539  const int r1 = COLORMAP_NEAREST(search_method, palette, node, rgb);
540  const int r2 = colormap_nearest_bruteforce(palette, rgb);
541  if (r1 != r2) {
542  const uint32_t c1 = palette[r1];
543  const uint32_t c2 = palette[r2];
544  const uint8_t palrgb1[] = { c1>>16 & 0xff, c1>> 8 & 0xff, c1 & 0xff };
545  const uint8_t palrgb2[] = { c2>>16 & 0xff, c2>> 8 & 0xff, c2 & 0xff };
546  const int d1 = diff(palrgb1, rgb);
547  const int d2 = diff(palrgb2, rgb);
548  if (d1 != d2) {
549  av_log(NULL, AV_LOG_ERROR,
550  "/!\\ %02X%02X%02X: %d ! %d (%06X ! %06X) / dist: %d ! %d\n",
551  r, g, b, r1, r2, c1 & 0xffffff, c2 & 0xffffff, d1, d2);
552  ret = 1;
553  }
554  }
555  }
556  }
557  }
558  return ret;
559 }
560 
561 struct color {
562  uint32_t value;
563  uint8_t pal_id;
564 };
565 
566 struct color_rect {
567  uint8_t min[3];
568  uint8_t max[3];
569 };
570 
571 typedef int (*cmp_func)(const void *, const void *);
572 
573 #define DECLARE_CMP_FUNC(name, pos) \
574 static int cmp_##name(const void *pa, const void *pb) \
575 { \
576  const struct color *a = pa; \
577  const struct color *b = pb; \
578  return (a->value >> (8 * (2 - (pos))) & 0xff) \
579  - (b->value >> (8 * (2 - (pos))) & 0xff); \
580 }
581 
582 DECLARE_CMP_FUNC(r, 0)
583 DECLARE_CMP_FUNC(g, 1)
584 DECLARE_CMP_FUNC(b, 2)
585 
586 static const cmp_func cmp_funcs[] = {cmp_r, cmp_g, cmp_b};
587 
588 static int get_next_color(const uint8_t *color_used, const uint32_t *palette,
589  int *component, const struct color_rect *box)
590 {
591  int wr, wg, wb;
592  int i, longest = 0;
593  unsigned nb_color = 0;
594  struct color_rect ranges;
595  struct color tmp_pal[256];
596  cmp_func cmpf;
597 
598  ranges.min[0] = ranges.min[1] = ranges.min[2] = 0xff;
599  ranges.max[0] = ranges.max[1] = ranges.max[2] = 0x00;
600 
601  for (i = 0; i < AVPALETTE_COUNT; i++) {
602  const uint32_t c = palette[i];
603  const uint8_t r = c >> 16 & 0xff;
604  const uint8_t g = c >> 8 & 0xff;
605  const uint8_t b = c & 0xff;
606 
607  if (color_used[i] ||
608  r < box->min[0] || g < box->min[1] || b < box->min[2] ||
609  r > box->max[0] || g > box->max[1] || b > box->max[2])
610  continue;
611 
612  if (r < ranges.min[0]) ranges.min[0] = r;
613  if (g < ranges.min[1]) ranges.min[1] = g;
614  if (b < ranges.min[2]) ranges.min[2] = b;
615 
616  if (r > ranges.max[0]) ranges.max[0] = r;
617  if (g > ranges.max[1]) ranges.max[1] = g;
618  if (b > ranges.max[2]) ranges.max[2] = b;
619 
620  tmp_pal[nb_color].value = c;
621  tmp_pal[nb_color].pal_id = i;
622 
623  nb_color++;
624  }
625 
626  if (!nb_color)
627  return -1;
628 
629  /* define longest axis that will be the split component */
630  wr = ranges.max[0] - ranges.min[0];
631  wg = ranges.max[1] - ranges.min[1];
632  wb = ranges.max[2] - ranges.min[2];
633  if (wr >= wg && wr >= wb) longest = 0;
634  if (wg >= wr && wg >= wb) longest = 1;
635  if (wb >= wr && wb >= wg) longest = 2;
636  cmpf = cmp_funcs[longest];
637  *component = longest;
638 
639  /* sort along this axis to get median */
640  AV_QSORT(tmp_pal, nb_color, struct color, cmpf);
641 
642  return tmp_pal[nb_color >> 1].pal_id;
643 }
644 
645 static int colormap_insert(struct color_node *map,
646  uint8_t *color_used,
647  int *nb_used,
648  const uint32_t *palette,
649  const struct color_rect *box)
650 {
651  uint32_t c;
652  int component, cur_id;
653  int node_left_id = -1, node_right_id = -1;
654  struct color_node *node;
655  struct color_rect box1, box2;
656  const int pal_id = get_next_color(color_used, palette, &component, box);
657 
658  if (pal_id < 0)
659  return -1;
660 
661  /* create new node with that color */
662  cur_id = (*nb_used)++;
663  c = palette[pal_id];
664  node = &map[cur_id];
665  node->split = component;
666  node->palette_id = pal_id;
667  node->val[0] = c>>16 & 0xff;
668  node->val[1] = c>> 8 & 0xff;
669  node->val[2] = c & 0xff;
670 
671  color_used[pal_id] = 1;
672 
673  /* get the two boxes this node creates */
674  box1 = box2 = *box;
675  box1.max[component] = node->val[component];
676  box2.min[component] = node->val[component] + 1;
677 
678  node_left_id = colormap_insert(map, color_used, nb_used, palette, &box1);
679 
680  if (box2.min[component] <= box2.max[component])
681  node_right_id = colormap_insert(map, color_used, nb_used, palette, &box2);
682 
683  node->left_id = node_left_id;
684  node->right_id = node_right_id;
685 
686  return cur_id;
687 }
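/* Annotation, not part of vf_paletteuse.c: colormap_insert() is a textbook
 * median-split k-d tree build over the palette. get_next_color() finds, among
 * the still-unused entries inside the current bounding box, the one sitting at
 * the median of the box's longest axis; that color becomes the node, the box is
 * cut in two along that axis on either side of the node's value, and both
 * halves are inserted recursively until a box contains no unused color. */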
688 
689 static int cmp_pal_entry(const void *a, const void *b)
690 {
691  const int c1 = *(const uint32_t *)a & 0xffffff;
692  const int c2 = *(const uint32_t *)b & 0xffffff;
693  return c1 - c2;
694 }
695 
696 static void load_colormap(PaletteUseContext *s)
697 {
698  int i, nb_used = 0;
699  uint8_t color_used[AVPALETTE_COUNT] = {0};
700  uint32_t last_color = 0;
701  struct color_rect box;
702 
703  /* disable transparent colors and dups */
704  qsort(s->palette, AVPALETTE_COUNT, sizeof(*s->palette), cmp_pal_entry);
705  for (i = 0; i < AVPALETTE_COUNT; i++) {
706  const uint32_t c = s->palette[i];
707  if (i != 0 && c == last_color) {
708  color_used[i] = 1;
709  continue;
710  }
711  last_color = c;
712  if ((c & 0xff000000) != 0xff000000) {
713  color_used[i] = 1; // ignore transparent color(s)
714  continue;
715  }
716  }
717 
718  box.min[0] = box.min[1] = box.min[2] = 0x00;
719  box.max[0] = box.max[1] = box.max[2] = 0xff;
720 
721  colormap_insert(s->map, color_used, &nb_used, s->palette, &box);
722 
723  if (s->dot_filename)
724  disp_tree(s->map, s->dot_filename);
725 
726  if (s->debug_accuracy) {
727  if (!debug_accuracy(s->map, s->palette, s->color_search_method))
728  av_log(NULL, AV_LOG_INFO, "Accuracy check passed\n");
729  }
730 }
731 
732 static void debug_mean_error(PaletteUseContext *s, const AVFrame *in1,
733  const AVFrame *in2, int frame_count)
734 {
735  int x, y;
736  const uint32_t *palette = s->palette;
737  uint32_t *src1 = (uint32_t *)in1->data[0];
738  uint8_t *src2 = in2->data[0];
739  const int src1_linesize = in1->linesize[0] >> 2;
740  const int src2_linesize = in2->linesize[0];
741  const float div = in1->width * in1->height * 3;
742  unsigned mean_err = 0;
743 
744  for (y = 0; y < in1->height; y++) {
745  for (x = 0; x < in1->width; x++) {
746  const uint32_t c1 = src1[x];
747  const uint32_t c2 = palette[src2[x]];
748  const uint8_t rgb1[] = {c1 >> 16 & 0xff, c1 >> 8 & 0xff, c1 & 0xff};
749  const uint8_t rgb2[] = {c2 >> 16 & 0xff, c2 >> 8 & 0xff, c2 & 0xff};
750  mean_err += diff(rgb1, rgb2);
751  }
752  src1 += src1_linesize;
753  src2 += src2_linesize;
754  }
755 
756  s->total_mean_err += mean_err;
757 
758  av_log(NULL, AV_LOG_INFO, "MEP:%.3f TotalMEP:%.3f\n",
759  mean_err / div, s->total_mean_err / (div * frame_count));
760 }
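/* Annotation, not part of vf_paletteuse.c: diff() returns a squared RGB
 * distance, and div is width*height*3, so the printed MEP is the mean squared
 * error per channel sample for this frame; TotalMEP is the same figure averaged
 * over every frame filtered so far. */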
761 
762 static void set_processing_window(enum diff_mode diff_mode,
763  const AVFrame *prv_src, const AVFrame *cur_src,
764  const AVFrame *prv_dst, AVFrame *cur_dst,
765  int *xp, int *yp, int *wp, int *hp)
766 {
767  int x_start = 0, y_start = 0;
768  int width = cur_src->width;
769  int height = cur_src->height;
770 
771  if (prv_src && diff_mode == DIFF_MODE_RECTANGLE) {
772  int y;
773  int x_end = cur_src->width - 1,
774  y_end = cur_src->height - 1;
775  const uint32_t *prv_srcp = (const uint32_t *)prv_src->data[0];
776  const uint32_t *cur_srcp = (const uint32_t *)cur_src->data[0];
777  const uint8_t *prv_dstp = prv_dst->data[0];
778  uint8_t *cur_dstp = cur_dst->data[0];
779 
780  const int prv_src_linesize = prv_src->linesize[0] >> 2;
781  const int cur_src_linesize = cur_src->linesize[0] >> 2;
782  const int prv_dst_linesize = prv_dst->linesize[0];
783  const int cur_dst_linesize = cur_dst->linesize[0];
784 
785  /* skip common lines */
786  while (y_start < y_end && !memcmp(prv_srcp + y_start*prv_src_linesize,
787  cur_srcp + y_start*cur_src_linesize,
788  cur_src->width * 4)) {
789  memcpy(cur_dstp + y_start*cur_dst_linesize,
790  prv_dstp + y_start*prv_dst_linesize,
791  cur_dst->width);
792  y_start++;
793  }
794  while (y_end > y_start && !memcmp(prv_srcp + y_end*prv_src_linesize,
795  cur_srcp + y_end*cur_src_linesize,
796  cur_src->width * 4)) {
797  memcpy(cur_dstp + y_end*cur_dst_linesize,
798  prv_dstp + y_end*prv_dst_linesize,
799  cur_dst->width);
800  y_end--;
801  }
802 
803  height = y_end + 1 - y_start;
804 
805  /* skip common columns */
806  while (x_start < x_end) {
807  int same_column = 1;
808  for (y = y_start; y <= y_end; y++) {
809  if (prv_srcp[y*prv_src_linesize + x_start] != cur_srcp[y*cur_src_linesize + x_start]) {
810  same_column = 0;
811  break;
812  }
813  }
814  if (!same_column)
815  break;
816  x_start++;
817  }
818  while (x_end > x_start) {
819  int same_column = 1;
820  for (y = y_start; y <= y_end; y++) {
821  if (prv_srcp[y*prv_src_linesize + x_end] != cur_srcp[y*cur_src_linesize + x_end]) {
822  same_column = 0;
823  break;
824  }
825  }
826  if (!same_column)
827  break;
828  x_end--;
829  }
830  width = x_end + 1 - x_start;
831 
832  if (x_start) {
833  for (y = y_start; y <= y_end; y++)
834  memcpy(cur_dstp + y*cur_dst_linesize,
835  prv_dstp + y*prv_dst_linesize, x_start);
836  }
837  if (x_end != cur_src->width - 1) {
838  const int copy_len = cur_src->width - 1 - x_end;
839  for (y = y_start; y <= y_end; y++)
840  memcpy(cur_dstp + y*cur_dst_linesize + x_end + 1,
841  prv_dstp + y*prv_dst_linesize + x_end + 1,
842  copy_len);
843  }
844  }
845  *xp = x_start;
846  *yp = y_start;
847  *wp = width;
848  *hp = height;
849 }
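/* Annotation, not part of vf_paletteuse.c: with diff_mode=rectangle this
 * shrinks (x, y, w, h) to the bounding box of pixels that differ from the
 * previous input frame and copies the untouched rows and columns of the
 * previous, already palettized output into the new output frame, so that
 * set_frame() only has to re-quantize the changed rectangle. */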
850 
851 static AVFrame *apply_palette(AVFilterLink *inlink, AVFrame *in)
852 {
853  int x, y, w, h;
854  AVFilterContext *ctx = inlink->dst;
855  PaletteUseContext *s = ctx->priv;
856  AVFilterLink *outlink = inlink->dst->outputs[0];
857 
858  AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
859  if (!out) {
860  av_frame_free(&in);
861  return NULL;
862  }
863  av_frame_copy_props(out, in);
864 
865  set_processing_window(s->diff_mode, s->last_in, in,
866  s->last_out, out, &x, &y, &w, &h);
867  av_frame_free(&s->last_in);
868  av_frame_free(&s->last_out);
869  s->last_in = av_frame_clone(in);
870  s->last_out = av_frame_clone(out);
871  if (!s->last_in || !s->last_out ||
872  av_frame_make_writable(s->last_in) < 0) {
873  av_frame_free(&in);
874  av_frame_free(&out);
875  return NULL;
876  }
877 
878  av_dlog(ctx, "%dx%d rect: (%d;%d) -> (%d,%d) [area:%dx%d]\n",
879  w, h, x, y, x+w, y+h, in->width, in->height);
880 
881  if (s->set_frame(s, out, in, x, y, w, h) < 0) {
882  av_frame_free(&out);
883  return NULL;
884  }
885  memcpy(out->data[1], s->palette, AVPALETTE_SIZE);
886  if (s->calc_mean_err)
887  debug_mean_error(s, in, out, inlink->frame_count);
888  av_frame_free(&in);
889  return out;
890 }
891 
892 static int config_output(AVFilterLink *outlink)
893 {
894  int ret;
895  AVFilterContext *ctx = outlink->src;
896  PaletteUseContext *s = ctx->priv;
897 
898  outlink->w = ctx->inputs[0]->w;
899  outlink->h = ctx->inputs[0]->h;
900 
901  outlink->time_base = ctx->inputs[0]->time_base;
902  if ((ret = ff_dualinput_init(ctx, &s->dinput)) < 0)
903  return ret;
904  return 0;
905 }
906 
907 static int config_input_palette(AVFilterLink *inlink)
908 {
909  AVFilterContext *ctx = inlink->dst;
910 
911  if (inlink->w * inlink->h != AVPALETTE_COUNT) {
912  av_log(ctx, AV_LOG_ERROR,
913  "Palette input must contain exactly %d pixels. "
914  "Specified input has %dx%d=%d pixels\n",
915  AVPALETTE_COUNT, inlink->w, inlink->h,
916  inlink->w * inlink->h);
917  return AVERROR(EINVAL);
918  }
919  return 0;
920 }
921 
922 static void load_palette(PaletteUseContext *s, const AVFrame *palette_frame)
923 {
924  int i, x, y;
925  const uint32_t *p = (const uint32_t *)palette_frame->data[0];
926  const int p_linesize = palette_frame->linesize[0] >> 2;
927 
928  i = 0;
929  for (y = 0; y < palette_frame->height; y++) {
930  for (x = 0; x < palette_frame->width; x++)
931  s->palette[i++] = p[x];
932  p += p_linesize;
933  }
934 
935  load_colormap(s);
936 
937  s->palette_loaded = 1;
938 }
939 
940 static AVFrame *load_apply_palette(AVFilterContext *ctx, AVFrame *main,
941  const AVFrame *second)
942 {
943  AVFilterLink *inlink = ctx->inputs[0];
944  PaletteUseContext *s = ctx->priv;
945  if (!s->palette_loaded) {
946  load_palette(s, second);
947  }
948  return apply_palette(inlink, main);
949 }
950 
951 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
952 {
953  PaletteUseContext *s = inlink->dst->priv;
954  return ff_dualinput_filter_frame(&s->dinput, inlink, in);
955 }
956 
957 #define DEFINE_SET_FRAME(color_search, name, value) \
958 static int set_frame_##name(PaletteUseContext *s, AVFrame *out, AVFrame *in, \
959  int x_start, int y_start, int w, int h) \
960 { \
961  return set_frame(s, out, in, x_start, y_start, w, h, value, color_search); \
962 }
963 
964 #define DEFINE_SET_FRAME_COLOR_SEARCH(color_search, color_search_macro) \
965  DEFINE_SET_FRAME(color_search_macro, color_search##_##none, DITHERING_NONE) \
966  DEFINE_SET_FRAME(color_search_macro, color_search##_##bayer, DITHERING_BAYER) \
967  DEFINE_SET_FRAME(color_search_macro, color_search##_##heckbert, DITHERING_HECKBERT) \
968  DEFINE_SET_FRAME(color_search_macro, color_search##_##floyd_steinberg, DITHERING_FLOYD_STEINBERG) \
969  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2, DITHERING_SIERRA2) \
970  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2_4a, DITHERING_SIERRA2_4A) \
971 
972 DEFINE_SET_FRAME_COLOR_SEARCH(nns_iterative, COLOR_SEARCH_NNS_ITERATIVE)
973 DEFINE_SET_FRAME_COLOR_SEARCH(nns_recursive, COLOR_SEARCH_NNS_RECURSIVE)
974 DEFINE_SET_FRAME_COLOR_SEARCH(bruteforce,    COLOR_SEARCH_BRUTEFORCE)
975 
976 #define DITHERING_ENTRIES(color_search) { \
977  set_frame_##color_search##_none, \
978  set_frame_##color_search##_bayer, \
979  set_frame_##color_search##_heckbert, \
980  set_frame_##color_search##_floyd_steinberg, \
981  set_frame_##color_search##_sierra2, \
982  set_frame_##color_search##_sierra2_4a, \
983 }
984 
985 static const set_frame_func set_frame_lut[NB_COLOR_SEARCHES][NB_DITHERING] = {
986  DITHERING_ENTRIES(nns_iterative),
987  DITHERING_ENTRIES(nns_recursive),
988  DITHERING_ENTRIES(bruteforce),
989 };
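/* Annotation, not part of vf_paletteuse.c: DEFINE_SET_FRAME expands set_frame()
 * once per (color search, dithering) pair with both parameters as compile-time
 * constants; since set_frame() and the colormap helpers are av_always_inline,
 * the per-pixel branches on dither and search_method can be folded away in each
 * specialization, and init() simply picks the matching entry from this LUT. */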
990 
991 static int dither_value(int p)
992 {
993  const int q = p ^ (p >> 3);
994  return (p & 4) >> 2 | (q & 4) >> 1 \
995  | (p & 2) << 1 | (q & 2) << 2 \
996  | (p & 1) << 4 | (q & 1) << 5;
997 }
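/* Annotation, not part of vf_paletteuse.c: dither_value() builds the 8x8
 * ordered-dither (Bayer-type) threshold matrix for position p = y<<3 | x by
 * interleaving the bit-reversed x with the bit-reversed x^y, producing each
 * value 0..63 exactly once. The top-left 2x2 corner works out to
 *      0 48
 *     32 16
 * which is the familiar dispersed-dot pattern scaled to 0..63. */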
998 
999 static av_cold int init(AVFilterContext *ctx)
1000 {
1001  PaletteUseContext *s = ctx->priv;
1002  s->dinput.repeatlast = 1; // only 1 frame in the palette
1003  s->dinput.process = load_apply_palette;
1004 
1005  s->set_frame = set_frame_lut[s->color_search_method][s->dither];
1006 
1007  if (s->dither == DITHERING_BAYER) {
1008  int i;
1009  const int delta = 1 << (5 - s->bayer_scale); // to avoid too much luma
1010 
1011  for (i = 0; i < FF_ARRAY_ELEMS(s->ordered_dither); i++)
1012  s->ordered_dither[i] = (dither_value(i) >> s->bayer_scale) - delta;
1013  }
1014 
1015  return 0;
1016 }
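/* Annotation, not part of vf_paletteuse.c: after the right shift by bayer_scale
 * the matrix values span 0 .. (63 >> bayer_scale), and delta = 1 << (5 - bayer_scale)
 * is about half of that span, so subtracting it recenters the offsets around zero
 * (for the default bayer_scale=2: -8 .. +7). That keeps the added pattern from
 * brightening the picture overall, which appears to be what the "avoid too much
 * luma" comment above is getting at. */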
1017 
1018 static int request_frame(AVFilterLink *outlink)
1019 {
1020  PaletteUseContext *s = outlink->src->priv;
1021  return ff_dualinput_request_frame(&s->dinput, outlink);
1022 }
1023 
1024 static av_cold void uninit(AVFilterContext *ctx)
1025 {
1026  int i;
1027  PaletteUseContext *s = ctx->priv;
1028 
1029  ff_dualinput_uninit(&s->dinput);
1030  for (i = 0; i < CACHE_SIZE; i++)
1031  av_freep(&s->cache[i].entries);
1032  av_frame_free(&s->last_in);
1033  av_frame_free(&s->last_out);
1034 }
1035 
1036 static const AVFilterPad paletteuse_inputs[] = {
1037  {
1038  .name = "default",
1039  .type = AVMEDIA_TYPE_VIDEO,
1040  .filter_frame = filter_frame,
1041  .needs_writable = 1, // for error diffusion dithering
1042  },{
1043  .name = "palette",
1044  .type = AVMEDIA_TYPE_VIDEO,
1045  .config_props = config_input_palette,
1046  .filter_frame = filter_frame,
1047  },
1048  { NULL }
1049 };
1050 
1051 static const AVFilterPad paletteuse_outputs[] = {
1052  {
1053  .name = "default",
1054  .type = AVMEDIA_TYPE_VIDEO,
1055  .config_props = config_output,
1056  .request_frame = request_frame,
1057  },
1058  { NULL }
1059 };
1060 
1061 AVFilter ff_vf_paletteuse = {
1062  .name = "paletteuse",
1063  .description = NULL_IF_CONFIG_SMALL("Use a palette to downsample an input video stream."),
1064  .priv_size = sizeof(PaletteUseContext),
1065  .query_formats = query_formats,
1066  .init = init,
1067  .uninit = uninit,
1068  .inputs = paletteuse_inputs,
1069  .outputs = paletteuse_outputs,
1070  .priv_class = &paletteuse_class,
1071 };