FFmpeg
vf_paletteuse.c
1 /*
2  * Copyright (c) 2015 Stupeflix
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * Use a palette to downsample an input video stream.
24  */
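/*
 * Overview (informal, derived from query_formats() and config_input_palette()
 * below): the filter has two inputs -- the RGB32 video to quantize and a
 * single RGB32 frame carrying the 256-entry palette, typically produced by
 * the palettegen filter -- and it outputs PAL8 frames that reference that
 * palette.
 */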
25 
26 #include "libavutil/bprint.h"
27 #include "libavutil/internal.h"
28 #include "libavutil/opt.h"
29 #include "libavutil/qsort.h"
30 #include "avfilter.h"
31 #include "filters.h"
32 #include "framesync.h"
33 #include "internal.h"
34 
35 enum dithering_mode {
36  DITHERING_NONE,
37  DITHERING_BAYER,
38  DITHERING_HECKBERT,
39  DITHERING_FLOYD_STEINBERG,
40  DITHERING_SIERRA2,
41  DITHERING_SIERRA2_4A,
42  NB_DITHERING
43 };
44 
45 enum color_search_method {
46  COLOR_SEARCH_NNS_ITERATIVE,
47  COLOR_SEARCH_NNS_RECURSIVE,
48  COLOR_SEARCH_BRUTEFORCE,
49  NB_COLOR_SEARCHES
50 };
51 
52 enum diff_mode {
53  DIFF_MODE_NONE,
54  DIFF_MODE_RECTANGLE,
55  NB_DIFF_MODE
56 };
57 
58 struct color_node {
59  uint8_t val[4];
60  uint8_t palette_id;
61  int split;
62  int left_id, right_id;
63 };
64 
65 #define NBITS 5
66 #define CACHE_SIZE (1<<(3*NBITS))
67 
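/*
 * Rough illustration of the cache layout (worked example, not from the
 * original source): CACHE_SIZE is 1 << (3*NBITS) = 32768 buckets. color_get()
 * below indexes a bucket with the NBITS low bits of each of R, G and B, e.g.
 * for the color 0xFF336699 (r=0x33, g=0x66, b=0x99):
 *   rhash = 0x33 & 0x1f = 0x13
 *   ghash = 0x66 & 0x1f = 0x06
 *   bhash = 0x99 & 0x1f = 0x19
 *   hash  = 0x13<<10 | 0x06<<5 | 0x19 = 19673
 * Colors sharing a bucket (8 high-bit patterns per channel) are then told
 * apart by the exact 32-bit color stored in each cached_color entry.
 */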
68 struct cached_color {
69  uint32_t color;
70  uint8_t pal_entry;
71 };
72 
73 struct cache_node {
74  struct cached_color *entries;
75  int nb_entries;
76 };
77 
78 struct PaletteUseContext;
79 
80 typedef int (*set_frame_func)(struct PaletteUseContext *s, AVFrame *out, AVFrame *in,
81  int x_start, int y_start, int width, int height);
82 
83 typedef struct PaletteUseContext {
84  const AVClass *class;
85  FFFrameSync fs;
86  struct cache_node cache[CACHE_SIZE]; /* lookup cache */
87  struct color_node map[AVPALETTE_COUNT]; /* 3D-Tree (KD-Tree with K=3) for reverse colormap */
88  uint32_t palette[AVPALETTE_COUNT];
89  int transparency_index; /* index in the palette of transparency. -1 if there is no transparency in the palette. */
90  int trans_thresh;
91  int palette_loaded;
92  int dither;
93  int new;
94  set_frame_func set_frame;
95  int bayer_scale;
96  int ordered_dither[8*8];
97  int diff_mode;
98  AVFrame *last_in;
99  AVFrame *last_out;
100 
101  /* debug options */
102  char *dot_filename;
103  int color_search_method;
104  int calc_mean_err;
105  uint64_t total_mean_err;
106  int debug_accuracy;
107 } PaletteUseContext;
108 
109 #define OFFSET(x) offsetof(PaletteUseContext, x)
110 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
111 static const AVOption paletteuse_options[] = {
112  { "dither", "select dithering mode", OFFSET(dither), AV_OPT_TYPE_INT, {.i64=DITHERING_SIERRA2_4A}, 0, NB_DITHERING-1, FLAGS, "dithering_mode" },
113  { "bayer", "ordered 8x8 bayer dithering (deterministic)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_BAYER}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
114  { "heckbert", "dithering as defined by Paul Heckbert in 1982 (simple error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_HECKBERT}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
115  { "floyd_steinberg", "Floyd and Steinberg dithering (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_FLOYD_STEINBERG}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
116  { "sierra2", "Frankie Sierra dithering v2 (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
117  { "sierra2_4a", "Frankie Sierra dithering v2 \"Lite\" (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2_4A}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
118  { "bayer_scale", "set scale for bayer dithering", OFFSET(bayer_scale), AV_OPT_TYPE_INT, {.i64=2}, 0, 5, FLAGS },
119  { "diff_mode", "set frame difference mode", OFFSET(diff_mode), AV_OPT_TYPE_INT, {.i64=DIFF_MODE_NONE}, 0, NB_DIFF_MODE-1, FLAGS, "diff_mode" },
120  { "rectangle", "process smallest different rectangle", 0, AV_OPT_TYPE_CONST, {.i64=DIFF_MODE_RECTANGLE}, INT_MIN, INT_MAX, FLAGS, "diff_mode" },
121  { "new", "take new palette for each output frame", OFFSET(new), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
122  { "alpha_threshold", "set the alpha threshold for transparency", OFFSET(trans_thresh), AV_OPT_TYPE_INT, {.i64=128}, 0, 255, FLAGS },
123 
124  /* following are the debug options, not part of the official API */
125  { "debug_kdtree", "save Graphviz graph of the kdtree in specified file", OFFSET(dot_filename), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
126  { "color_search", "set reverse colormap color search method", OFFSET(color_search_method), AV_OPT_TYPE_INT, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, 0, NB_COLOR_SEARCHES-1, FLAGS, "search" },
127  { "nns_iterative", "iterative search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
128  { "nns_recursive", "recursive search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_RECURSIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
129  { "bruteforce", "brute-force into the palette", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_BRUTEFORCE}, INT_MIN, INT_MAX, FLAGS, "search" },
130  { "mean_err", "compute and print mean error", OFFSET(calc_mean_err), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
131  { "debug_accuracy", "test color search accuracy", OFFSET(debug_accuracy), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
132  { NULL }
133 };
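/*
 * Illustrative use of the options above in a filtergraph (file names are
 * placeholders, not from this file); the palette input is usually produced
 * beforehand by the palettegen filter:
 *   ffmpeg -i input.mkv -i palette.png -lavfi paletteuse=dither=bayer:bayer_scale=3 output.gif
 */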
134 
135 AVFILTER_DEFINE_CLASS(paletteuse);
136 
137 static int load_apply_palette(FFFrameSync *fs);
138 
139 static int query_formats(AVFilterContext *ctx)
140 {
141  static const enum AVPixelFormat in_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
142  static const enum AVPixelFormat inpal_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
143  static const enum AVPixelFormat out_fmts[] = {AV_PIX_FMT_PAL8, AV_PIX_FMT_NONE};
144  int ret;
145  if ((ret = ff_formats_ref(ff_make_format_list(in_fmts),
146  &ctx->inputs[0]->outcfg.formats)) < 0 ||
147  (ret = ff_formats_ref(ff_make_format_list(inpal_fmts),
148  &ctx->inputs[1]->outcfg.formats)) < 0 ||
149  (ret = ff_formats_ref(ff_make_format_list(out_fmts),
150  &ctx->outputs[0]->incfg.formats)) < 0)
151  return ret;
152  return 0;
153 }
154 
155 static av_always_inline uint32_t dither_color(uint32_t px, int er, int eg,
156  int eb, int scale, int shift)
157 {
158  return px >> 24 << 24
159  | av_clip_uint8((px >> 16 & 0xff) + ((er * scale) / (1<<shift))) << 16
160  | av_clip_uint8((px >> 8 & 0xff) + ((eg * scale) / (1<<shift))) << 8
161  | av_clip_uint8((px & 0xff) + ((eb * scale) / (1<<shift)));
162 }
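/*
 * Worked example (illustrative): dither_color() adds err*scale/2^shift to
 * each RGB channel of a neighbouring pixel and clips to [0,255], leaving the
 * alpha byte (px >> 24) untouched. With Floyd-Steinberg below, the right
 * neighbour gets scale=7, shift=4, i.e. 7/16 of the error: for er=+32 the
 * red channel is increased by 32*7/16 = 14.
 */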
163 
164 static av_always_inline int diff(const uint8_t *c1, const uint8_t *c2, const int trans_thresh)
165 {
166  // XXX: try L*a*b with CIE76 (dL*dL + da*da + db*db)
167  const int dr = c1[1] - c2[1];
168  const int dg = c1[2] - c2[2];
169  const int db = c1[3] - c2[3];
170 
171  if (c1[0] < trans_thresh && c2[0] < trans_thresh) {
172  return 0;
173  } else if (c1[0] >= trans_thresh && c2[0] >= trans_thresh) {
174  return dr*dr + dg*dg + db*db;
175  } else {
176  return 255*255 + 255*255 + 255*255;
177  }
178 }
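/*
 * Worked example (illustrative): colors are passed as {a,r,g,b}. Two opaque
 * colors compare by squared RGB distance, e.g. white {FF,FF,FF,FF} vs red
 * {FF,FF,00,00} gives 0 + 255*255 + 255*255 = 130050. Two "transparent
 * enough" colors (alpha below trans_thresh) always match (distance 0), while
 * an opaque/transparent pair returns the maximum 3*255*255 = 195075 so the
 * two are never picked for each other.
 */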
179 
180 static av_always_inline uint8_t colormap_nearest_bruteforce(const uint32_t *palette, const uint8_t *argb, const int trans_thresh)
181 {
182  int i, pal_id = -1, min_dist = INT_MAX;
183 
184  for (i = 0; i < AVPALETTE_COUNT; i++) {
185  const uint32_t c = palette[i];
186 
187  if (c >> 24 >= trans_thresh) { // ignore transparent entry
188  const uint8_t palargb[] = {
189  palette[i]>>24 & 0xff,
190  palette[i]>>16 & 0xff,
191  palette[i]>> 8 & 0xff,
192  palette[i] & 0xff,
193  };
194  const int d = diff(palargb, argb, trans_thresh);
195  if (d < min_dist) {
196  pal_id = i;
197  min_dist = d;
198  }
199  }
200  }
201  return pal_id;
202 }
203 
204 /* Recursive form, simpler but a bit slower. Kept for reference. */
205 struct nearest_color {
206  int node_pos;
207  int dist_sqd;
208 };
209 
210 static void colormap_nearest_node(const struct color_node *map,
211  const int node_pos,
212  const uint8_t *target,
213  const int trans_thresh,
214  struct nearest_color *nearest)
215 {
216  const struct color_node *kd = map + node_pos;
217  const int s = kd->split;
218  int dx, nearer_kd_id, further_kd_id;
219  const uint8_t *current = kd->val;
220  const int current_to_target = diff(target, current, trans_thresh);
221 
222  if (current_to_target < nearest->dist_sqd) {
223  nearest->node_pos = node_pos;
224  nearest->dist_sqd = current_to_target;
225  }
226 
227  if (kd->left_id != -1 || kd->right_id != -1) {
228  dx = target[s] - current[s];
229 
230  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
231  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
232 
233  if (nearer_kd_id != -1)
234  colormap_nearest_node(map, nearer_kd_id, target, trans_thresh, nearest);
235 
236  if (further_kd_id != -1 && dx*dx < nearest->dist_sqd)
237  colormap_nearest_node(map, further_kd_id, target, trans_thresh, nearest);
238  }
239 }
240 
241 static av_always_inline uint8_t colormap_nearest_recursive(const struct color_node *node, const uint8_t *rgb, const int trans_thresh)
242 {
243  struct nearest_color res = {.dist_sqd = INT_MAX, .node_pos = -1};
244  colormap_nearest_node(node, 0, rgb, trans_thresh, &res);
245  return node[res.node_pos].palette_id;
246 }
247 
248 struct stack_node {
249  int color_id;
250  int dx2;
251 };
252 
253 static av_always_inline uint8_t colormap_nearest_iterative(const struct color_node *root, const uint8_t *target, const int trans_thresh)
254 {
255  int pos = 0, best_node_id = -1, best_dist = INT_MAX, cur_color_id = 0;
256  struct stack_node nodes[16];
257  struct stack_node *node = &nodes[0];
258 
259  for (;;) {
260 
261  const struct color_node *kd = &root[cur_color_id];
262  const uint8_t *current = kd->val;
263  const int current_to_target = diff(target, current, trans_thresh);
264 
265  /* Compare current color node to the target and update our best node if
266  * it's actually better. */
267  if (current_to_target < best_dist) {
268  best_node_id = cur_color_id;
269  if (!current_to_target)
270  goto end; // exact match, we can return immediately
271  best_dist = current_to_target;
272  }
273 
274  /* Check if it's not a leaf */
275  if (kd->left_id != -1 || kd->right_id != -1) {
276  const int split = kd->split;
277  const int dx = target[split] - current[split];
278  int nearer_kd_id, further_kd_id;
279 
280  /* Define which side is the most interesting. */
281  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
282  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
283 
284  if (nearer_kd_id != -1) {
285  if (further_kd_id != -1) {
286  /* Here, both paths are defined, so we push a state for
287  * when we are going back. */
288  node->color_id = further_kd_id;
289  node->dx2 = dx*dx;
290  pos++;
291  node++;
292  }
293  /* We can now update current color with the most probable path
294  * (no need to create a state since there is nothing to save
295  * anymore). */
296  cur_color_id = nearer_kd_id;
297  continue;
298  } else if (dx*dx < best_dist) {
299  /* The nearest path isn't available, so there is only one path
300  * possible and it's the least probable. We enter it only if the
301  * distance from the current point to the hyper rectangle is
302  * less than our best distance. */
303  cur_color_id = further_kd_id;
304  continue;
305  }
306  }
307 
308  /* Unstack as much as we can, i.e. as long as the stacked "least probable"
309   * branches cannot actually contain a closer match. */
310  do {
311  if (--pos < 0)
312  goto end;
313  node--;
314  } while (node->dx2 >= best_dist);
315 
316  /* We got a node where the least probable branch might actually contain
317  * a relevant color. */
318  cur_color_id = node->color_id;
319  }
320 
321 end:
322  return root[best_node_id].palette_id;
323 }
324 
325 #define COLORMAP_NEAREST(search, palette, root, target, trans_thresh) \
326  search == COLOR_SEARCH_NNS_ITERATIVE ? colormap_nearest_iterative(root, target, trans_thresh) : \
327  search == COLOR_SEARCH_NNS_RECURSIVE ? colormap_nearest_recursive(root, target, trans_thresh) : \
328  colormap_nearest_bruteforce(palette, target, trans_thresh)
329 
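/*
 * Note (informal): `search` is effectively a compile-time constant here --
 * the DEFINE_SET_FRAME() specializations near the end of the file pass a
 * fixed color_search_method and dithering_mode through the av_always_inline
 * set_frame()/color_get() chain -- so the two comparisons above are expected
 * to fold away and each set_frame_*() variant keeps a single search routine.
 */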
330 /**
331  * Check if the requested color is in the cache already. If not, find it in the
332  * color tree and cache it.
333  * Note: a, r, g, and b are the components of color, but are passed as well to avoid
334  * recomputing them (they are generally computed by the caller for other uses).
335  */
336 static av_always_inline int color_get(PaletteUseContext *s, uint32_t color,
337  uint8_t a, uint8_t r, uint8_t g, uint8_t b,
338  const enum color_search_method search_method)
339 {
340  int i;
341  const uint8_t argb_elts[] = {a, r, g, b};
342  const uint8_t rhash = r & ((1<<NBITS)-1);
343  const uint8_t ghash = g & ((1<<NBITS)-1);
344  const uint8_t bhash = b & ((1<<NBITS)-1);
345  const unsigned hash = rhash<<(NBITS*2) | ghash<<NBITS | bhash;
346  struct cache_node *node = &s->cache[hash];
347  struct cached_color *e;
348 
349  // first, check for transparency
350  if (a < s->trans_thresh && s->transparency_index >= 0) {
351  return s->transparency_index;
352  }
353 
354  for (i = 0; i < node->nb_entries; i++) {
355  e = &node->entries[i];
356  if (e->color == color)
357  return e->pal_entry;
358  }
359 
360  e = av_dynarray2_add((void**)&node->entries, &node->nb_entries,
361  sizeof(*node->entries), NULL);
362  if (!e)
363  return AVERROR(ENOMEM);
364  e->color = color;
365  e->pal_entry = COLORMAP_NEAREST(search_method, s->palette, s->map, argb_elts, s->trans_thresh);
366 
367  return e->pal_entry;
368 }
369 
370 static av_always_inline int get_dst_color_err(PaletteUseContext *s,
371  uint32_t c, int *er, int *eg, int *eb,
372  const enum color_search_method search_method)
373 {
374  const uint8_t a = c >> 24 & 0xff;
375  const uint8_t r = c >> 16 & 0xff;
376  const uint8_t g = c >> 8 & 0xff;
377  const uint8_t b = c & 0xff;
378  uint32_t dstc;
379  const int dstx = color_get(s, c, a, r, g, b, search_method);
380  if (dstx < 0)
381  return dstx;
382  dstc = s->palette[dstx];
383  *er = r - (dstc >> 16 & 0xff);
384  *eg = g - (dstc >> 8 & 0xff);
385  *eb = b - (dstc & 0xff);
386  return dstx;
387 }
388 
389 static av_always_inline int set_frame(PaletteUseContext *s, AVFrame *out, AVFrame *in,
390  int x_start, int y_start, int w, int h,
391  enum dithering_mode dither,
392  const enum color_search_method search_method)
393 {
394  int x, y;
395  const int src_linesize = in ->linesize[0] >> 2;
396  const int dst_linesize = out->linesize[0];
397  uint32_t *src = ((uint32_t *)in ->data[0]) + y_start*src_linesize;
398  uint8_t *dst = out->data[0] + y_start*dst_linesize;
399 
400  w += x_start;
401  h += y_start;
402 
403  for (y = y_start; y < h; y++) {
404  for (x = x_start; x < w; x++) {
405  int er, eg, eb;
406 
407  if (dither == DITHERING_BAYER) {
408  const int d = s->ordered_dither[(y & 7)<<3 | (x & 7)];
409  const uint8_t a8 = src[x] >> 24 & 0xff;
410  const uint8_t r8 = src[x] >> 16 & 0xff;
411  const uint8_t g8 = src[x] >> 8 & 0xff;
412  const uint8_t b8 = src[x] & 0xff;
413  const uint8_t r = av_clip_uint8(r8 + d);
414  const uint8_t g = av_clip_uint8(g8 + d);
415  const uint8_t b = av_clip_uint8(b8 + d);
416  const int color = color_get(s, src[x], a8, r, g, b, search_method);
417 
418  if (color < 0)
419  return color;
420  dst[x] = color;
421 
422  } else if (dither == DITHERING_HECKBERT) {
423  const int right = x < w - 1, down = y < h - 1;
424  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
425 
426  if (color < 0)
427  return color;
428  dst[x] = color;
429 
430  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 3, 3);
431  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 3, 3);
432  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 2, 3);
433 
434  } else if (dither == DITHERING_FLOYD_STEINBERG) {
435  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
436  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
437 
438  if (color < 0)
439  return color;
440  dst[x] = color;
441 
442  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 7, 4);
443  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 3, 4);
444  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 5, 4);
445  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 1, 4);
446 
447  } else if (dither == DITHERING_SIERRA2) {
448  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
449  const int right2 = x < w - 2, left2 = x > x_start + 1;
450  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
451 
452  if (color < 0)
453  return color;
454  dst[x] = color;
455 
456  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 4, 4);
457  if (right2) src[ x + 2] = dither_color(src[ x + 2], er, eg, eb, 3, 4);
458 
459  if (down) {
460  if (left2) src[ src_linesize + x - 2] = dither_color(src[ src_linesize + x - 2], er, eg, eb, 1, 4);
461  if (left) src[ src_linesize + x - 1] = dither_color(src[ src_linesize + x - 1], er, eg, eb, 2, 4);
462  if (1) src[ src_linesize + x ] = dither_color(src[ src_linesize + x ], er, eg, eb, 3, 4);
463  if (right) src[ src_linesize + x + 1] = dither_color(src[ src_linesize + x + 1], er, eg, eb, 2, 4);
464  if (right2) src[ src_linesize + x + 2] = dither_color(src[ src_linesize + x + 2], er, eg, eb, 1, 4);
465  }
466 
467  } else if (dither == DITHERING_SIERRA2_4A) {
468  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
469  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
470 
471  if (color < 0)
472  return color;
473  dst[x] = color;
474 
475  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 2, 2);
476  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 1, 2);
477  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 1, 2);
478 
479  } else {
480  const uint8_t a = src[x] >> 24 & 0xff;
481  const uint8_t r = src[x] >> 16 & 0xff;
482  const uint8_t g = src[x] >> 8 & 0xff;
483  const uint8_t b = src[x] & 0xff;
484  const int color = color_get(s, src[x], a, r, g, b, search_method);
485 
486  if (color < 0)
487  return color;
488  dst[x] = color;
489  }
490  }
491  src += src_linesize;
492  dst += dst_linesize;
493  }
494  return 0;
495 }
496 
497 #define INDENT 4
498 static void disp_node(AVBPrint *buf,
499  const struct color_node *map,
500  int parent_id, int node_id,
501  int depth)
502 {
503  const struct color_node *node = &map[node_id];
504  const uint32_t fontcolor = node->val[1] > 0x50 &&
505  node->val[2] > 0x50 &&
506  node->val[3] > 0x50 ? 0 : 0xffffff;
507  const int rgb_comp = node->split - 1;
508  av_bprintf(buf, "%*cnode%d ["
509  "label=\"%c%02X%c%02X%c%02X%c\" "
510  "fillcolor=\"#%02x%02x%02x\" "
511  "fontcolor=\"#%06"PRIX32"\"]\n",
512  depth*INDENT, ' ', node->palette_id,
513  "[ "[rgb_comp], node->val[1],
514  "][ "[rgb_comp], node->val[2],
515  " ]["[rgb_comp], node->val[3],
516  " ]"[rgb_comp],
517  node->val[1], node->val[2], node->val[3],
518  fontcolor);
519  if (parent_id != -1)
520  av_bprintf(buf, "%*cnode%d -> node%d\n", depth*INDENT, ' ',
521  map[parent_id].palette_id, node->palette_id);
522  if (node->left_id != -1) disp_node(buf, map, node_id, node->left_id, depth + 1);
523  if (node->right_id != -1) disp_node(buf, map, node_id, node->right_id, depth + 1);
524 }
525 
526 // debug_kdtree=kdtree.dot -> dot -Tpng kdtree.dot > kdtree.png
527 static int disp_tree(const struct color_node *node, const char *fname)
528 {
529  AVBPrint buf;
530  FILE *f = av_fopen_utf8(fname, "w");
531 
532  if (!f) {
533  int ret = AVERROR(errno);
534  av_log(NULL, AV_LOG_ERROR, "Cannot open file '%s' for writing: %s\n",
535  fname, av_err2str(ret));
536  return ret;
537  }
538 
539  av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED);
540 
541  av_bprintf(&buf, "digraph {\n");
542  av_bprintf(&buf, " node [style=filled fontsize=10 shape=box]\n");
543  disp_node(&buf, node, -1, 0, 0);
544  av_bprintf(&buf, "}\n");
545 
546  fwrite(buf.str, 1, buf.len, f);
547  fclose(f);
548  av_bprint_finalize(&buf, NULL);
549  return 0;
550 }
551 
552 static int debug_accuracy(const struct color_node *node, const uint32_t *palette, const int trans_thresh,
553  const enum color_search_method search_method)
554 {
555  int r, g, b, ret = 0;
556 
557  for (r = 0; r < 256; r++) {
558  for (g = 0; g < 256; g++) {
559  for (b = 0; b < 256; b++) {
560  const uint8_t argb[] = {0xff, r, g, b};
561  const int r1 = COLORMAP_NEAREST(search_method, palette, node, argb, trans_thresh);
562  const int r2 = colormap_nearest_bruteforce(palette, argb, trans_thresh);
563  if (r1 != r2) {
564  const uint32_t c1 = palette[r1];
565  const uint32_t c2 = palette[r2];
566  const uint8_t palargb1[] = { 0xff, c1>>16 & 0xff, c1>> 8 & 0xff, c1 & 0xff };
567  const uint8_t palargb2[] = { 0xff, c2>>16 & 0xff, c2>> 8 & 0xff, c2 & 0xff };
568  const int d1 = diff(palargb1, argb, trans_thresh);
569  const int d2 = diff(palargb2, argb, trans_thresh);
570  if (d1 != d2) {
571  av_log(NULL, AV_LOG_ERROR,
572  "/!\\ %02X%02X%02X: %d ! %d (%06"PRIX32" ! %06"PRIX32") / dist: %d ! %d\n",
573  r, g, b, r1, r2, c1 & 0xffffff, c2 & 0xffffff, d1, d2);
574  ret = 1;
575  }
576  }
577  }
578  }
579  }
580  return ret;
581 }
582 
583 struct color {
584  uint32_t value;
585  uint8_t pal_id;
586 };
587 
588 struct color_rect {
589  uint8_t min[3];
590  uint8_t max[3];
591 };
592 
593 typedef int (*cmp_func)(const void *, const void *);
594 
595 #define DECLARE_CMP_FUNC(name, pos) \
596 static int cmp_##name(const void *pa, const void *pb) \
597 { \
598  const struct color *a = pa; \
599  const struct color *b = pb; \
600  return (a->value >> (8 * (3 - (pos))) & 0xff) \
601  - (b->value >> (8 * (3 - (pos))) & 0xff); \
602 }
603 
604 DECLARE_CMP_FUNC(a, 0)
605 DECLARE_CMP_FUNC(r, 1)
606 DECLARE_CMP_FUNC(g, 2)
607 DECLARE_CMP_FUNC(b, 3)
608 
609 static const cmp_func cmp_funcs[] = {cmp_a, cmp_r, cmp_g, cmp_b};
610 
611 static int get_next_color(const uint8_t *color_used, const uint32_t *palette,
612  const int trans_thresh,
613  int *component, const struct color_rect *box)
614 {
615  int wr, wg, wb;
616  int i, longest = 0;
617  unsigned nb_color = 0;
618  struct color_rect ranges;
619  struct color tmp_pal[256];
620  cmp_func cmpf;
621 
622  ranges.min[0] = ranges.min[1] = ranges.min[2] = 0xff;
623  ranges.max[0] = ranges.max[1] = ranges.max[2] = 0x00;
624 
625  for (i = 0; i < AVPALETTE_COUNT; i++) {
626  const uint32_t c = palette[i];
627  const uint8_t a = c >> 24 & 0xff;
628  const uint8_t r = c >> 16 & 0xff;
629  const uint8_t g = c >> 8 & 0xff;
630  const uint8_t b = c & 0xff;
631 
632  if (a < trans_thresh) {
633  continue;
634  }
635 
636  if (color_used[i] || (a != 0xff) ||
637  r < box->min[0] || g < box->min[1] || b < box->min[2] ||
638  r > box->max[0] || g > box->max[1] || b > box->max[2])
639  continue;
640 
641  if (r < ranges.min[0]) ranges.min[0] = r;
642  if (g < ranges.min[1]) ranges.min[1] = g;
643  if (b < ranges.min[2]) ranges.min[2] = b;
644 
645  if (r > ranges.max[0]) ranges.max[0] = r;
646  if (g > ranges.max[1]) ranges.max[1] = g;
647  if (b > ranges.max[2]) ranges.max[2] = b;
648 
649  tmp_pal[nb_color].value = c;
650  tmp_pal[nb_color].pal_id = i;
651 
652  nb_color++;
653  }
654 
655  if (!nb_color)
656  return -1;
657 
658  /* define longest axis that will be the split component */
659  wr = ranges.max[0] - ranges.min[0];
660  wg = ranges.max[1] - ranges.min[1];
661  wb = ranges.max[2] - ranges.min[2];
662  if (wr >= wg && wr >= wb) longest = 1;
663  if (wg >= wr && wg >= wb) longest = 2;
664  if (wb >= wr && wb >= wg) longest = 3;
665  cmpf = cmp_funcs[longest];
666  *component = longest;
667 
668  /* sort along this axis to get median */
669  AV_QSORT(tmp_pal, nb_color, struct color, cmpf);
670 
671  return tmp_pal[nb_color >> 1].pal_id;
672 }
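/*
 * Worked example (illustrative, values made up): if the colors remaining in
 * the box span R in [16,200] (width 184), G in [40,60] (width 20) and B in
 * [0,255] (width 255), blue is the widest axis, so *component is set to 3,
 * the candidates are sorted by their blue value and the median entry becomes
 * the next kd-tree node returned to colormap_insert().
 */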
673 
674 static int colormap_insert(struct color_node *map,
675  uint8_t *color_used,
676  int *nb_used,
677  const uint32_t *palette,
678  const int trans_thresh,
679  const struct color_rect *box)
680 {
681  uint32_t c;
682  int component, cur_id;
683  int node_left_id = -1, node_right_id = -1;
684  struct color_node *node;
685  struct color_rect box1, box2;
686  const int pal_id = get_next_color(color_used, palette, trans_thresh, &component, box);
687 
688  if (pal_id < 0)
689  return -1;
690 
691  /* create new node with that color */
692  cur_id = (*nb_used)++;
693  c = palette[pal_id];
694  node = &map[cur_id];
695  node->split = component;
696  node->palette_id = pal_id;
697  node->val[0] = c>>24 & 0xff;
698  node->val[1] = c>>16 & 0xff;
699  node->val[2] = c>> 8 & 0xff;
700  node->val[3] = c & 0xff;
701 
702  color_used[pal_id] = 1;
703 
704  /* get the two boxes this node creates */
705  box1 = box2 = *box;
706  box1.max[component-1] = node->val[component];
707  box2.min[component-1] = node->val[component] + 1;
708 
709  node_left_id = colormap_insert(map, color_used, nb_used, palette, trans_thresh, &box1);
710 
711  if (box2.min[component-1] <= box2.max[component-1])
712  node_right_id = colormap_insert(map, color_used, nb_used, palette, trans_thresh, &box2);
713 
714  node->left_id = node_left_id;
715  node->right_id = node_right_id;
716 
717  return cur_id;
718 }
719 
720 static int cmp_pal_entry(const void *a, const void *b)
721 {
722  const int c1 = *(const uint32_t *)a & 0xffffff;
723  const int c2 = *(const uint32_t *)b & 0xffffff;
724  return c1 - c2;
725 }
726 
727 static void load_colormap(PaletteUseContext *s)
728 {
729  int i, nb_used = 0;
730  uint8_t color_used[AVPALETTE_COUNT] = {0};
731  uint32_t last_color = 0;
732  struct color_rect box;
733 
734  /* disable transparent colors and dups */
735  qsort(s->palette, AVPALETTE_COUNT, sizeof(*s->palette), cmp_pal_entry);
736  // update transparency index:
737  if (s->transparency_index >= 0) {
738  for (i = 0; i < AVPALETTE_COUNT; i++) {
739  if ((s->palette[i]>>24 & 0xff) == 0) {
740  s->transparency_index = i; // we are assuming at most one transparent color in palette
741  break;
742  }
743  }
744  }
745 
746  for (i = 0; i < AVPALETTE_COUNT; i++) {
747  const uint32_t c = s->palette[i];
748  if (i != 0 && c == last_color) {
749  color_used[i] = 1;
750  continue;
751  }
752  last_color = c;
753  if (c >> 24 < s->trans_thresh) {
754  color_used[i] = 1; // ignore transparent color(s)
755  continue;
756  }
757  }
758 
759  box.min[0] = box.min[1] = box.min[2] = 0x00;
760  box.max[0] = box.max[1] = box.max[2] = 0xff;
761 
762  colormap_insert(s->map, color_used, &nb_used, s->palette, s->trans_thresh, &box);
763 
764  if (s->dot_filename)
765  disp_tree(s->map, s->dot_filename);
766 
767  if (s->debug_accuracy) {
768  if (!debug_accuracy(s->map, s->palette, s->trans_thresh, s->color_search_method))
769  av_log(NULL, AV_LOG_INFO, "Accuracy check passed\n");
770  }
771 }
772 
773 static void debug_mean_error(PaletteUseContext *s, const AVFrame *in1,
774  const AVFrame *in2, int frame_count)
775 {
776  int x, y;
777  const uint32_t *palette = s->palette;
778  uint32_t *src1 = (uint32_t *)in1->data[0];
779  uint8_t *src2 = in2->data[0];
780  const int src1_linesize = in1->linesize[0] >> 2;
781  const int src2_linesize = in2->linesize[0];
782  const float div = in1->width * in1->height * 3;
783  unsigned mean_err = 0;
784 
785  for (y = 0; y < in1->height; y++) {
786  for (x = 0; x < in1->width; x++) {
787  const uint32_t c1 = src1[x];
788  const uint32_t c2 = palette[src2[x]];
789  const uint8_t argb1[] = {0xff, c1 >> 16 & 0xff, c1 >> 8 & 0xff, c1 & 0xff};
790  const uint8_t argb2[] = {0xff, c2 >> 16 & 0xff, c2 >> 8 & 0xff, c2 & 0xff};
791  mean_err += diff(argb1, argb2, s->trans_thresh);
792  }
793  src1 += src1_linesize;
794  src2 += src2_linesize;
795  }
796 
797  s->total_mean_err += mean_err;
798 
799  av_log(NULL, AV_LOG_INFO, "MEP:%.3f TotalMEP:%.3f\n",
800  mean_err / div, s->total_mean_err / (div * frame_count));
801 }
802 
803 static void set_processing_window(enum diff_mode diff_mode,
804  const AVFrame *prv_src, const AVFrame *cur_src,
805  const AVFrame *prv_dst, AVFrame *cur_dst,
806  int *xp, int *yp, int *wp, int *hp)
807 {
808  int x_start = 0, y_start = 0;
809  int width = cur_src->width;
810  int height = cur_src->height;
811 
812  if (prv_src->data[0] && diff_mode == DIFF_MODE_RECTANGLE) {
813  int y;
814  int x_end = cur_src->width - 1,
815  y_end = cur_src->height - 1;
816  const uint32_t *prv_srcp = (const uint32_t *)prv_src->data[0];
817  const uint32_t *cur_srcp = (const uint32_t *)cur_src->data[0];
818  const uint8_t *prv_dstp = prv_dst->data[0];
819  uint8_t *cur_dstp = cur_dst->data[0];
820 
821  const int prv_src_linesize = prv_src->linesize[0] >> 2;
822  const int cur_src_linesize = cur_src->linesize[0] >> 2;
823  const int prv_dst_linesize = prv_dst->linesize[0];
824  const int cur_dst_linesize = cur_dst->linesize[0];
825 
826  /* skip common lines */
827  while (y_start < y_end && !memcmp(prv_srcp + y_start*prv_src_linesize,
828  cur_srcp + y_start*cur_src_linesize,
829  cur_src->width * 4)) {
830  memcpy(cur_dstp + y_start*cur_dst_linesize,
831  prv_dstp + y_start*prv_dst_linesize,
832  cur_dst->width);
833  y_start++;
834  }
835  while (y_end > y_start && !memcmp(prv_srcp + y_end*prv_src_linesize,
836  cur_srcp + y_end*cur_src_linesize,
837  cur_src->width * 4)) {
838  memcpy(cur_dstp + y_end*cur_dst_linesize,
839  prv_dstp + y_end*prv_dst_linesize,
840  cur_dst->width);
841  y_end--;
842  }
843 
844  height = y_end + 1 - y_start;
845 
846  /* skip common columns */
847  while (x_start < x_end) {
848  int same_column = 1;
849  for (y = y_start; y <= y_end; y++) {
850  if (prv_srcp[y*prv_src_linesize + x_start] != cur_srcp[y*cur_src_linesize + x_start]) {
851  same_column = 0;
852  break;
853  }
854  }
855  if (!same_column)
856  break;
857  x_start++;
858  }
859  while (x_end > x_start) {
860  int same_column = 1;
861  for (y = y_start; y <= y_end; y++) {
862  if (prv_srcp[y*prv_src_linesize + x_end] != cur_srcp[y*cur_src_linesize + x_end]) {
863  same_column = 0;
864  break;
865  }
866  }
867  if (!same_column)
868  break;
869  x_end--;
870  }
871  width = x_end + 1 - x_start;
872 
873  if (x_start) {
874  for (y = y_start; y <= y_end; y++)
875  memcpy(cur_dstp + y*cur_dst_linesize,
876  prv_dstp + y*prv_dst_linesize, x_start);
877  }
878  if (x_end != cur_src->width - 1) {
879  const int copy_len = cur_src->width - 1 - x_end;
880  for (y = y_start; y <= y_end; y++)
881  memcpy(cur_dstp + y*cur_dst_linesize + x_end + 1,
882  prv_dstp + y*prv_dst_linesize + x_end + 1,
883  copy_len);
884  }
885  }
886  *xp = x_start;
887  *yp = y_start;
888  *wp = width;
889  *hp = height;
890 }
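/*
 * Illustrative scenario (not from the original source): with
 * diff_mode=rectangle and only, say, a 16x12 region changed between two
 * frames, the row/column scans above shrink (x,y,w,h) to that region; the
 * untouched rows and columns of the new output are memcpy'd from the
 * previous PAL8 output and only the small rectangle is re-quantized by
 * set_frame().
 */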
891 
892 static int apply_palette(AVFilterLink *inlink, AVFrame *in, AVFrame **outf)
893 {
894  int x, y, w, h, ret;
895  AVFilterContext *ctx = inlink->dst;
896  PaletteUseContext *s = ctx->priv;
897  AVFilterLink *outlink = inlink->dst->outputs[0];
898 
899  AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
900  if (!out) {
901  *outf = NULL;
902  return AVERROR(ENOMEM);
903  }
904  av_frame_copy_props(out, in);
905 
906  set_processing_window(s->diff_mode, s->last_in, in,
907  s->last_out, out, &x, &y, &w, &h);
908  av_frame_unref(s->last_in);
909  av_frame_unref(s->last_out);
910  if ((ret = av_frame_ref(s->last_in, in)) < 0 ||
911  (ret = av_frame_ref(s->last_out, out)) < 0 ||
912  (ret = av_frame_make_writable(s->last_in)) < 0) {
913  av_frame_free(&out);
914  *outf = NULL;
915  return ret;
916  }
917 
918  ff_dlog(ctx, "%dx%d rect: (%d;%d) -> (%d,%d) [area:%dx%d]\n",
919  w, h, x, y, x+w, y+h, in->width, in->height);
920 
921  ret = s->set_frame(s, out, in, x, y, w, h);
922  if (ret < 0) {
923  av_frame_free(&out);
924  *outf = NULL;
925  return ret;
926  }
927  memcpy(out->data[1], s->palette, AVPALETTE_SIZE);
928  if (s->calc_mean_err)
929  debug_mean_error(s, in, out, inlink->frame_count_out);
930  *outf = out;
931  return 0;
932 }
933 
934 static int config_output(AVFilterLink *outlink)
935 {
936  int ret;
937  AVFilterContext *ctx = outlink->src;
938  PaletteUseContext *s = ctx->priv;
939 
940  ret = ff_framesync_init_dualinput(&s->fs, ctx);
941  if (ret < 0)
942  return ret;
943  s->fs.opt_repeatlast = 1; // only 1 frame in the palette
944  s->fs.in[1].before = s->fs.in[1].after = EXT_INFINITY;
945  s->fs.on_event = load_apply_palette;
946 
947  outlink->w = ctx->inputs[0]->w;
948  outlink->h = ctx->inputs[0]->h;
949 
950  outlink->time_base = ctx->inputs[0]->time_base;
951  if ((ret = ff_framesync_configure(&s->fs)) < 0)
952  return ret;
953  return 0;
954 }
955 
956 static int config_input_palette(AVFilterLink *inlink)
957 {
958  AVFilterContext *ctx = inlink->dst;
959 
960  if (inlink->w * inlink->h != AVPALETTE_COUNT) {
961  av_log(ctx, AV_LOG_ERROR,
962  "Palette input must contain exactly %d pixels. "
963  "Specified input has %dx%d=%d pixels\n",
964  AVPALETTE_COUNT, inlink->w, inlink->h,
965  inlink->w * inlink->h);
966  return AVERROR(EINVAL);
967  }
968  return 0;
969 }
970 
971 static void load_palette(PaletteUseContext *s, const AVFrame *palette_frame)
972 {
973  int i, x, y;
974  const uint32_t *p = (const uint32_t *)palette_frame->data[0];
975  const int p_linesize = palette_frame->linesize[0] >> 2;
976 
977  s->transparency_index = -1;
978 
979  if (s->new) {
980  memset(s->palette, 0, sizeof(s->palette));
981  memset(s->map, 0, sizeof(s->map));
982  for (i = 0; i < CACHE_SIZE; i++)
983  av_freep(&s->cache[i].entries);
984  memset(s->cache, 0, sizeof(s->cache));
985  }
986 
987  i = 0;
988  for (y = 0; y < palette_frame->height; y++) {
989  for (x = 0; x < palette_frame->width; x++) {
990  s->palette[i] = p[x];
991  if (p[x]>>24 < s->trans_thresh) {
992  s->transparency_index = i; // we are assuming at most one transparent color in palette
993  }
994  i++;
995  }
996  p += p_linesize;
997  }
998 
999  load_colormap(s);
1000 
1001  if (!s->new)
1002  s->palette_loaded = 1;
1003 }
1004 
1005 static int load_apply_palette(FFFrameSync *fs)
1006 {
1007  AVFilterContext *ctx = fs->parent;
1008  AVFilterLink *inlink = ctx->inputs[0];
1009  PaletteUseContext *s = ctx->priv;
1010  AVFrame *master, *second, *out = NULL;
1011  int ret;
1012 
1013  // writable for error diffusion dithering
1014  ret = ff_framesync_dualinput_get_writable(fs, &master, &second);
1015  if (ret < 0)
1016  return ret;
1017  if (!master || !second) {
1018  av_frame_free(&master);
1019  return AVERROR_BUG;
1020  }
1021  if (!s->palette_loaded) {
1022  load_palette(s, second);
1023  }
1024  ret = apply_palette(inlink, master, &out);
1025  av_frame_free(&master);
1026  if (ret < 0)
1027  return ret;
1028  return ff_filter_frame(ctx->outputs[0], out);
1029 }
1030 
1031 #define DEFINE_SET_FRAME(color_search, name, value) \
1032 static int set_frame_##name(PaletteUseContext *s, AVFrame *out, AVFrame *in, \
1033  int x_start, int y_start, int w, int h) \
1034 { \
1035  return set_frame(s, out, in, x_start, y_start, w, h, value, color_search); \
1036 }
1037 
1038 #define DEFINE_SET_FRAME_COLOR_SEARCH(color_search, color_search_macro) \
1039  DEFINE_SET_FRAME(color_search_macro, color_search##_##none, DITHERING_NONE) \
1040  DEFINE_SET_FRAME(color_search_macro, color_search##_##bayer, DITHERING_BAYER) \
1041  DEFINE_SET_FRAME(color_search_macro, color_search##_##heckbert, DITHERING_HECKBERT) \
1042  DEFINE_SET_FRAME(color_search_macro, color_search##_##floyd_steinberg, DITHERING_FLOYD_STEINBERG) \
1043  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2, DITHERING_SIERRA2) \
1044  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2_4a, DITHERING_SIERRA2_4A) \
1045 
1046 DEFINE_SET_FRAME_COLOR_SEARCH(nns_iterative, COLOR_SEARCH_NNS_ITERATIVE)
1047 DEFINE_SET_FRAME_COLOR_SEARCH(nns_recursive, COLOR_SEARCH_NNS_RECURSIVE)
1048 DEFINE_SET_FRAME_COLOR_SEARCH(bruteforce,    COLOR_SEARCH_BRUTEFORCE)
1049 
1050 #define DITHERING_ENTRIES(color_search) { \
1051  set_frame_##color_search##_none, \
1052  set_frame_##color_search##_bayer, \
1053  set_frame_##color_search##_heckbert, \
1054  set_frame_##color_search##_floyd_steinberg, \
1055  set_frame_##color_search##_sierra2, \
1056  set_frame_##color_search##_sierra2_4a, \
1057 }
1058 
1059 static const set_frame_func set_frame_lut[NB_COLOR_SEARCHES][NB_DITHERING] = {
1060  DITHERING_ENTRIES(nns_iterative),
1061  DITHERING_ENTRIES(nns_recursive),
1062  DITHERING_ENTRIES(bruteforce),
1063 };
1064 
1065 static int dither_value(int p)
1066 {
1067  const int q = p ^ (p >> 3);
1068  return (p & 4) >> 2 | (q & 4) >> 1 \
1069  | (p & 2) << 1 | (q & 2) << 2 \
1070  | (p & 1) << 4 | (q & 1) << 5;
1071 }
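/*
 * Informal note: with i = (y&7)<<3 | (x&7), dither_value() bit-reverses and
 * interleaves x and x^y, the usual construction of the 8x8 ordered-dither
 * (Bayer) matrix, so every value 0..63 appears exactly once. init() below
 * then rescales it as (value >> bayer_scale) - delta; e.g. with the default
 * bayer_scale=2, delta = 1<<3 = 8 and the per-pixel offsets span -8..+7
 * before being added to R, G and B in set_frame().
 */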
1072 
1073 static av_cold int init(AVFilterContext *ctx)
1074 {
1075  PaletteUseContext *s = ctx->priv;
1076 
1077  s->last_in = av_frame_alloc();
1078  s->last_out = av_frame_alloc();
1079  if (!s->last_in || !s->last_out) {
1080  av_frame_free(&s->last_in);
1081  av_frame_free(&s->last_out);
1082  return AVERROR(ENOMEM);
1083  }
1084 
1085  s->set_frame = set_frame_lut[s->color_search_method][s->dither];
1086 
1087  if (s->dither == DITHERING_BAYER) {
1088  int i;
1089  const int delta = 1 << (5 - s->bayer_scale); // to avoid too much luma
1090 
1091  for (i = 0; i < FF_ARRAY_ELEMS(s->ordered_dither); i++)
1092  s->ordered_dither[i] = (dither_value(i) >> s->bayer_scale) - delta;
1093  }
1094 
1095  return 0;
1096 }
1097 
1098 static int activate(AVFilterContext *ctx)
1099 {
1100  PaletteUseContext *s = ctx->priv;
1101  return ff_framesync_activate(&s->fs);
1102 }
1103 
1104 static av_cold void uninit(AVFilterContext *ctx)
1105 {
1106  int i;
1107  PaletteUseContext *s = ctx->priv;
1108 
1109  ff_framesync_uninit(&s->fs);
1110  for (i = 0; i < CACHE_SIZE; i++)
1111  av_freep(&s->cache[i].entries);
1112  av_frame_free(&s->last_in);
1113  av_frame_free(&s->last_out);
1114 }
1115 
1116 static const AVFilterPad paletteuse_inputs[] = {
1117  {
1118  .name = "default",
1119  .type = AVMEDIA_TYPE_VIDEO,
1120  },{
1121  .name = "palette",
1122  .type = AVMEDIA_TYPE_VIDEO,
1123  .config_props = config_input_palette,
1124  },
1125  { NULL }
1126 };
1127 
1128 static const AVFilterPad paletteuse_outputs[] = {
1129  {
1130  .name = "default",
1131  .type = AVMEDIA_TYPE_VIDEO,
1132  .config_props = config_output,
1133  },
1134  { NULL }
1135 };
1136 
1137 AVFilter ff_vf_paletteuse = {
1138  .name = "paletteuse",
1139  .description = NULL_IF_CONFIG_SMALL("Use a palette to downsample an input video stream."),
1140  .priv_size = sizeof(PaletteUseContext),
1141  .query_formats = query_formats,
1142  .init = init,
1143  .uninit = uninit,
1144  .activate = activate,
1145  .inputs = paletteuse_inputs,
1146  .outputs = paletteuse_outputs,
1147  .priv_class = &paletteuse_class,
1148 };
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
cached_color::color
uint32_t color
Definition: vf_paletteuse.c:69
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:124
AV_BPRINT_SIZE_UNLIMITED
#define AV_BPRINT_SIZE_UNLIMITED
config_input_palette
static int config_input_palette(AVFilterLink *inlink)
Definition: vf_paletteuse.c:956
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
diff
static av_always_inline int diff(const uint8_t *c1, const uint8_t *c2, const int trans_thresh)
Definition: vf_paletteuse.c:164
get_dst_color_err
static av_always_inline int get_dst_color_err(PaletteUseContext *s, uint32_t c, int *er, int *eg, int *eb, const enum color_search_method search_method)
Definition: vf_paletteuse.c:370
colormap_nearest_node
static void colormap_nearest_node(const struct color_node *map, const int node_pos, const uint8_t *target, const int trans_thresh, struct nearest_color *nearest)
Definition: vf_paletteuse.c:210
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_paletteuse.c:139
PaletteUseContext::dot_filename
char * dot_filename
Definition: vf_paletteuse.c:102
r
const char * r
Definition: vf_curves.c:116
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:286
av_bprint_finalize
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:290
debug_mean_error
static void debug_mean_error(PaletteUseContext *s, const AVFrame *in1, const AVFrame *in2, int frame_count)
Definition: vf_paletteuse.c:773
out
FILE * out
Definition: movenc.c:54
color
Definition: vf_paletteuse.c:583
av_bprint_init
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1096
PaletteUseContext::last_out
AVFrame * last_out
Definition: vf_paletteuse.c:99
dither_color
static av_always_inline uint32_t dither_color(uint32_t px, int er, int eg, int eb, int scale, int shift)
Definition: vf_paletteuse.c:155
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1073
set_frame_func
int(* set_frame_func)(struct PaletteUseContext *s, AVFrame *out, AVFrame *in, int x_start, int y_start, int width, int height)
Definition: vf_paletteuse.c:80
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:318
av_frame_make_writable
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:611
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1104
AVFrame::width
int width
Definition: frame.h:376
w
uint8_t w
Definition: llviddspenc.c:39
av_dynarray2_add
void * av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size, const uint8_t *elem_data)
Add an element of size elem_size to a dynamic array.
Definition: mem.c:324
AVOption
AVOption.
Definition: opt.h:248
b
#define b
Definition: input.c:41
stack_node::dx2
int dx2
Definition: vf_paletteuse.c:250
data
const char data[16]
Definition: mxf.c:142
colormap_nearest_bruteforce
static av_always_inline uint8_t colormap_nearest_bruteforce(const uint32_t *palette, const uint8_t *argb, const int trans_thresh)
Definition: vf_paletteuse.c:180
PaletteUseContext::set_frame
set_frame_func set_frame
Definition: vf_paletteuse.c:94
ff_vf_paletteuse
AVFilter ff_vf_paletteuse
Definition: vf_paletteuse.c:1137
disp_tree
static int disp_tree(const struct color_node *node, const char *fname)
Definition: vf_paletteuse.c:527
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:149
c1
static const uint64_t c1
Definition: murmur3.c:51
FFFrameSync
Frame sync structure.
Definition: framesync.h:146
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
hash
uint8_t hash[HASH_SIZE]
Definition: movenc.c:57
PaletteUseContext::palette_loaded
int palette_loaded
Definition: vf_paletteuse.c:91
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:332
colormap_nearest_recursive
static av_always_inline uint8_t colormap_nearest_recursive(const struct color_node *node, const uint8_t *rgb, const int trans_thresh)
Definition: vf_paletteuse.c:241
stack_node::color_id
int color_id
Definition: vf_paletteuse.c:249
debug_accuracy
static int debug_accuracy(const struct color_node *node, const uint32_t *palette, const int trans_thresh, const enum color_search_method search_method)
Definition: vf_paletteuse.c:552
DIFF_MODE_NONE
@ DIFF_MODE_NONE
Definition: vf_paletteuse.c:53
rgb
Definition: rpzaenc.c:58
NB_DITHERING
@ NB_DITHERING
Definition: vf_paletteuse.c:42
dither_value
static int dither_value(int p)
Definition: vf_paletteuse.c:1065
COLOR_SEARCH_BRUTEFORCE
@ COLOR_SEARCH_BRUTEFORCE
Definition: vf_paletteuse.c:48
apply_palette
static int apply_palette(AVFilterLink *inlink, AVFrame *in, AVFrame **outf)
Definition: vf_paletteuse.c:892
PaletteUseContext::cache
struct cache_node cache[CACHE_SIZE]
Definition: vf_paletteuse.c:86
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
colormap_insert
static int colormap_insert(struct color_node *map, uint8_t *color_used, int *nb_used, const uint32_t *palette, const int trans_thresh, const struct color_rect *box)
Definition: vf_paletteuse.c:674
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:190
PaletteUseContext::ordered_dither
int ordered_dither[8 *8]
Definition: vf_paletteuse.c:96
colormap_nearest_iterative
static av_always_inline uint8_t colormap_nearest_iterative(const struct color_node *root, const uint8_t *target, const int trans_thresh)
Definition: vf_paletteuse.c:253
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
set_processing_window
static void set_processing_window(enum diff_mode diff_mode, const AVFrame *prv_src, const AVFrame *cur_src, const AVFrame *prv_dst, AVFrame *cur_dst, int *xp, int *yp, int *wp, int *hp)
Definition: vf_paletteuse.c:803
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
av_fopen_utf8
FILE * av_fopen_utf8(const char *path, const char *mode)
Open a file using a UTF-8 filename.
Definition: file_open.c:158
INDENT
#define INDENT
Definition: vf_paletteuse.c:497
color_rect
Definition: vf_paletteuse.c:588
DEFINE_SET_FRAME_COLOR_SEARCH
#define DEFINE_SET_FRAME_COLOR_SEARCH(color_search, color_search_macro)
Definition: vf_paletteuse.c:1038
PaletteUseContext::bayer_scale
int bayer_scale
Definition: vf_paletteuse.c:95
width
#define width
s
#define s(width, name)
Definition: cbs_vp9.c:257
dithering_mode
dithering_mode
Definition: vf_paletteuse.c:35
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_paletteuse.c:934
g
const char * g
Definition: vf_curves.c:117
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:466
color_node::right_id
int right_id
Definition: vf_paletteuse.c:62
DITHERING_HECKBERT
@ DITHERING_HECKBERT
Definition: vf_paletteuse.c:38
stack_node
Definition: vf_paletteuse.c:248
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
filters.h
nearest_color::dist_sqd
int dist_sqd
Definition: vf_paletteuse.c:207
ctx
AVFormatContext * ctx
Definition: movenc.c:48
set_frame_lut
static const set_frame_func set_frame_lut[NB_COLOR_SEARCHES][NB_DITHERING]
Definition: vf_paletteuse.c:1059
color_rect::max
uint8_t max[3]
Definition: vf_paletteuse.c:590
f
#define f(width, name)
Definition: cbs_vp9.c:255
if
if(ret)
Definition: filter_design.txt:179
color_node::palette_id
uint8_t palette_id
Definition: vf_paletteuse.c:60
load_apply_palette
static int load_apply_palette(FFFrameSync *fs)
Definition: vf_paletteuse.c:1005
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
NULL
#define NULL
Definition: coverity.c:32
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:658
PaletteUseContext::dither
int dither
Definition: vf_paletteuse.c:92
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:259
AVPALETTE_SIZE
#define AVPALETTE_SIZE
Definition: pixfmt.h:32
COLORMAP_NEAREST
#define COLORMAP_NEAREST(search, palette, root, target, trans_thresh)
Definition: vf_paletteuse.c:325
src
#define src
Definition: vp8dsp.c:255
get_next_color
static int get_next_color(const uint8_t *color_used, const uint32_t *palette, const int trans_thresh, int *component, const struct color_rect *box)
Definition: vf_paletteuse.c:611
DITHERING_ENTRIES
#define DITHERING_ENTRIES(color_search)
Definition: vf_paletteuse.c:1050
inputs
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
Definition: filter_design.txt:243
PaletteUseContext
Definition: vf_paletteuse.c:83
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
cmp_funcs
static const cmp_func cmp_funcs[]
Definition: vf_paletteuse.c:609
AVPALETTE_COUNT
#define AVPALETTE_COUNT
Definition: pixfmt.h:33
disp_node
static void disp_node(AVBPrint *buf, const struct color_node *map, int parent_id, int node_id, int depth)
Definition: vf_paletteuse.c:498
DITHERING_NONE
@ DITHERING_NONE
Definition: vf_paletteuse.c:36
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
paletteuse_options
static const AVOption paletteuse_options[]
Definition: vf_paletteuse.c:111
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:29
PaletteUseContext::trans_thresh
int trans_thresh
Definition: vf_paletteuse.c:90
qsort.h
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
ff_framesync_init_dualinput
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
Initialize a frame sync structure for dualinput.
Definition: framesync.c:358
master
const char * master
Definition: vf_curves.c:119
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:443
nearest_color
Definition: vf_paletteuse.c:205
DITHERING_BAYER
@ DITHERING_BAYER
Definition: vf_paletteuse.c:37
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:119
DITHERING_FLOYD_STEINBERG
@ DITHERING_FLOYD_STEINBERG
Definition: vf_paletteuse.c:39
PaletteUseContext::palette
uint32_t palette[AVPALETTE_COUNT]
Definition: vf_paletteuse.c:88
color
static const uint32_t color[16+AV_CLASS_CATEGORY_NB]
Definition: log.c:92
DITHERING_SIERRA2
@ DITHERING_SIERRA2
Definition: vf_paletteuse.c:40
PaletteUseContext::fs
FFFrameSync fs
Definition: vf_paletteuse.c:85
split
static char * split(char *message, char delim)
Definition: af_channelmap.c:81
height
#define height
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:372
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
color_get
static av_always_inline int color_get(PaletteUseContext *s, uint32_t color, uint8_t a, uint8_t r, uint8_t g, uint8_t b, const enum color_search_method search_method)
Check if the requested color is in the cache already.
Definition: vf_paletteuse.c:336
DITHERING_SIERRA2_4A
@ DITHERING_SIERRA2_4A
Definition: vf_paletteuse.c:41
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:205
PaletteUseContext::transparency_index
int transparency_index
Definition: vf_paletteuse.c:89
internal.h
DECLARE_CMP_FUNC
#define DECLARE_CMP_FUNC(name, pos)
Definition: vf_paletteuse.c:595
activate
static int activate(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1098
src1
#define src1
Definition: h264pred.c:140
OFFSET
#define OFFSET(x)
Definition: vf_paletteuse.c:109
in
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) #define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac) { } void ff_audio_convert_free(AudioConvert **ac) { if(! *ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);} AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, int apply_map) { AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method !=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2) { ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc) { av_free(ac);return NULL;} return ac;} in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar) { ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar ? ac->channels :1;} else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;} int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in) { int use_generic=1;int len=in->nb_samples;int p;if(ac->dc) { av_log(ac->avr, AV_LOG_TRACE, "%d samples - audio_convert: %s to %s (dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
Definition: audio_convert.c:326
bprint.h
i
int i
Definition: input.c:407
cache_node
Definition: vf_paletteuse.c:73
AV_QSORT
#define AV_QSORT(p, num, type, cmp)
Quicksort This sort is fast, and fully inplace but not stable and it is possible to construct input t...
Definition: qsort.h:33
internal.h
cmp_pal_entry
static int cmp_pal_entry(const void *a, const void *b)
Definition: vf_paletteuse.c:720
delta
float delta
Definition: vorbis_enc_data.h:457
av_always_inline
#define av_always_inline
Definition: attributes.h:49
cache_node::entries
struct cached_color * entries
Definition: vf_paletteuse.c:74
uint8_t
uint8_t
Definition: audio_convert.c:194
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:553
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:60
PaletteUseContext::diff_mode
int diff_mode
Definition: vf_paletteuse.c:97
color_node::split
int split
Definition: vf_paletteuse.c:61
cached_color::pal_entry
uint8_t pal_entry
Definition: vf_paletteuse.c:70
load_colormap
static void load_colormap(PaletteUseContext *s)
Definition: vf_paletteuse.c:727
PaletteUseContext::total_mean_err
uint64_t total_mean_err
Definition: vf_paletteuse.c:105
diff_mode
diff_mode
Definition: vf_paletteuse.c:52
FLAGS
#define FLAGS
Definition: vf_paletteuse.c:110
AVFilter
Filter definition.
Definition: avfilter.h:145
cache_node::nb_entries
int nb_entries
Definition: vf_paletteuse.c:75
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
ret
ret
Definition: filter_design.txt:187
PaletteUseContext::color_search_method
int color_search_method
Definition: vf_paletteuse.c:103
pos
unsigned int pos
Definition: spdifenc.c:412
av_bprintf
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
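A minimal AVBPrint sketch for the av_bprintf() entry above; the helper name, message, and log level are illustrative:

#include "libavutil/bprint.h"

static void log_transparency(void *log_ctx, int nb_transparent)
{
    AVBPrint buf;
    av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED);
    av_bprintf(&buf, "palette has %d transparent entry(ies)", nb_transparent);
    av_log(log_ctx, AV_LOG_DEBUG, "%s\n", buf.str);
    av_bprint_finalize(&buf, NULL); /* free any heap-allocated storage */
}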
set_frame
static av_always_inline int set_frame(PaletteUseContext *s, AVFrame *out, AVFrame *in, int x_start, int y_start, int w, int h, enum dithering_mode dither, const enum color_search_method search_method)
Definition: vf_paletteuse.c:389
left
Definition: snow.txt:386
NBITS
#define NBITS
Definition: vf_paletteuse.c:65
AVFrame::height
int height
Definition: frame.h:376
c2
static const uint64_t c2
Definition: murmur3.c:52
framesync.h
DIFF_MODE_RECTANGLE
@ DIFF_MODE_RECTANGLE
Definition: vf_paletteuse.c:54
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
COLOR_SEARCH_NNS_ITERATIVE
@ COLOR_SEARCH_NNS_ITERATIVE
Definition: vf_paletteuse.c:46
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:225
avfilter.h
cmp_func
int(* cmp_func)(const void *, const void *)
Definition: vf_paletteuse.c:593
PaletteUseContext::map
struct color_node map[AVPALETTE_COUNT]
Definition: vf_paletteuse.c:87
COLOR_SEARCH_NNS_RECURSIVE
@ COLOR_SEARCH_NNS_RECURSIVE
Definition: vf_paletteuse.c:47
PaletteUseContext::debug_accuracy
int debug_accuracy
Definition: vf_paletteuse.c:106
av_clip_uint8
#define av_clip_uint8
Definition: common.h:128
AVFilterContext
An instance of a filter.
Definition: avfilter.h:341
shift
static int shift(int a, int b)
Definition: sonic.c:82
color_node::val
uint8_t val[4]
Definition: vf_paletteuse.c:59
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
CACHE_SIZE
#define CACHE_SIZE
Definition: vf_paletteuse.c:66
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:71
color::pal_id
uint8_t pal_id
Definition: vf_paletteuse.c:585
NB_COLOR_SEARCHES
@ NB_COLOR_SEARCHES
Definition: vf_paletteuse.c:49
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:242
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
color::value
uint32_t value
Definition: vf_paletteuse.c:584
paletteuse_outputs
static const AVFilterPad paletteuse_outputs[]
Definition: vf_paletteuse.c:1128
PaletteUseContext::calc_mean_err
int calc_mean_err
Definition: vf_paletteuse.c:104
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:50
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:349
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
color_rect::min
uint8_t min[3]
Definition: vf_paletteuse.c:589
h
h
Definition: vp9dsp_template.c:2038
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:341
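A hedged sketch of how a framesync-driven filter's activate callback usually looks: it simply delegates scheduling to ff_framesync_activate(); this assumes the context embeds an FFFrameSync field named fs:

static int activate(AVFilterContext *ctx)
{
    PaletteUseContext *s = ctx->priv;
    return ff_framesync_activate(&s->fs); /* framesync drives input/output scheduling */
}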
color_node::left_id
int left_id
Definition: vf_paletteuse.c:62
color_node
Definition: vf_paletteuse.c:58
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(paletteuse)
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:229
ff_framesync_dualinput_get_writable
int ff_framesync_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
Same as ff_framesync_dualinput_get(), but make sure that f0 is writable.
Definition: framesync.c:396
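A hedged sketch of the dual-input on_event pattern that ff_framesync_dualinput_get_writable() supports: fetch a writable main frame plus the secondary frame, process, and forward the result; on_event() and the processing step are placeholders, not the filter's actual code:

static int on_event(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *main_frame = NULL, *second_frame = NULL;
    int ret = ff_framesync_dualinput_get_writable(fs, &main_frame, &second_frame);
    if (ret < 0)
        return ret;
    /* ... remap main_frame using the palette carried in second_frame ... */
    return ff_filter_frame(ctx->outputs[0], main_frame);
}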
int
int
Definition: ffmpeg_filter.c:170
PaletteUseContext::last_in
AVFrame * last_in
Definition: vf_paletteuse.c:98
nearest_color::node_pos
int node_pos
Definition: vf_paletteuse.c:206
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:234
paletteuse_inputs
static const AVFilterPad paletteuse_inputs[]
Definition: vf_paletteuse.c:1116
load_palette
static void load_palette(PaletteUseContext *s, const AVFrame *palette_frame)
Definition: vf_paletteuse.c:971
cached_color
Definition: vf_paletteuse.c:68
color_search_method
color_search_method
Definition: vf_paletteuse.c:45
min
float min
Definition: vorbis_enc_data.h:456
NB_DIFF_MODE
@ NB_DIFF_MODE
Definition: vf_paletteuse.c:55
dither
static const uint8_t dither[8][8]
Definition: vf_fspp.c:59