FFmpeg
vf_paletteuse.c
1 /*
2  * Copyright (c) 2015 Stupeflix
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 /**
22  * @file
23  * Use a palette to downsample an input video stream.
24  */
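/*
 * Typical usage pairs this filter with palettegen, e.g. (illustrative
 * command lines; file names are placeholders):
 *
 *   ffmpeg -i input.mkv -vf palettegen palette.png
 *   ffmpeg -i input.mkv -i palette.png -lavfi paletteuse output.gif
 *
 * The first input is the video to quantize; the second input carries the
 * palette, a single frame whose width*height is exactly AVPALETTE_COUNT
 * pixels (see config_input_palette() below).
 */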
25 
26 #include "libavutil/bprint.h"
27 #include "libavutil/internal.h"
28 #include "libavutil/opt.h"
29 #include "libavutil/qsort.h"
30 #include "avfilter.h"
31 #include "filters.h"
32 #include "framesync.h"
33 #include "internal.h"
34 
35 enum dithering_mode {
36  DITHERING_NONE,
37  DITHERING_BAYER,
38  DITHERING_HECKBERT,
39  DITHERING_FLOYD_STEINBERG,
40  DITHERING_SIERRA2,
41  DITHERING_SIERRA2_4A,
42  NB_DITHERING
43 };
44 
45 enum color_search_method {
46  COLOR_SEARCH_NNS_ITERATIVE,
47  COLOR_SEARCH_NNS_RECURSIVE,
48  COLOR_SEARCH_BRUTEFORCE,
49  NB_COLOR_SEARCHES
50 };
51 
52 enum diff_mode {
53  DIFF_MODE_NONE,
54  DIFF_MODE_RECTANGLE,
55  NB_DIFF_MODE
56 };
57 
58 struct color_node {
59  uint8_t val[4];
60  uint8_t palette_id;
61  int split;
62  int left_id, right_id;
63 };
64 
65 #define NBITS 5
66 #define CACHE_SIZE (1<<(3*NBITS))
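/*
 * Reverse-mapping cache: color_get() hashes the NBITS low bits of each of
 * R, G and B into one of 2^(3*NBITS) = 32768 buckets; every bucket holds a
 * growing array of exact colors already resolved to a palette entry, so a
 * repeated color skips the kd-tree/bruteforce search entirely.
 */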
67 
68 struct cached_color {
69  uint32_t color;
70  uint8_t pal_entry;
71 };
72 
73 struct cache_node {
74  struct cached_color *entries;
75  int nb_entries;
76 };
77 
78 struct PaletteUseContext;
79 
80 typedef int (*set_frame_func)(struct PaletteUseContext *s, AVFrame *out, AVFrame *in,
81  int x_start, int y_start, int width, int height);
82 
83 typedef struct PaletteUseContext {
84  const AVClass *class;
85  FFFrameSync fs;
86  struct cache_node cache[CACHE_SIZE]; /* lookup cache */
87  struct color_node map[AVPALETTE_COUNT]; /* 3D-Tree (KD-Tree with K=3) for reverse colormap */
88  uint32_t palette[AVPALETTE_COUNT];
89  int transparency_index; /* index in the palette of transparency. -1 if there is no transparency in the palette. */
90  int trans_thresh;
91  int palette_loaded;
92  int dither;
93  int new;
94  set_frame_func set_frame;
95  int bayer_scale;
96  int ordered_dither[8*8];
97  int diff_mode;
98  AVFrame *last_in;
99  AVFrame *last_out;
100 
101  /* debug options */
102  char *dot_filename;
103  int color_search_method;
104  int calc_mean_err;
105  uint64_t total_mean_err;
106  int debug_accuracy;
107 } PaletteUseContext;
108 
109 #define OFFSET(x) offsetof(PaletteUseContext, x)
110 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
111 static const AVOption paletteuse_options[] = {
112  { "dither", "select dithering mode", OFFSET(dither), AV_OPT_TYPE_INT, {.i64=DITHERING_SIERRA2_4A}, 0, NB_DITHERING-1, FLAGS, "dithering_mode" },
113  { "bayer", "ordered 8x8 bayer dithering (deterministic)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_BAYER}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
114  { "heckbert", "dithering as defined by Paul Heckbert in 1982 (simple error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_HECKBERT}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
115  { "floyd_steinberg", "Floyd and Steinberg dithering (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_FLOYD_STEINBERG}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
116  { "sierra2", "Frankie Sierra dithering v2 (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
117  { "sierra2_4a", "Frankie Sierra dithering v2 \"Lite\" (error diffusion)", 0, AV_OPT_TYPE_CONST, {.i64=DITHERING_SIERRA2_4A}, INT_MIN, INT_MAX, FLAGS, "dithering_mode" },
118  { "bayer_scale", "set scale for bayer dithering", OFFSET(bayer_scale), AV_OPT_TYPE_INT, {.i64=2}, 0, 5, FLAGS },
119  { "diff_mode", "set frame difference mode", OFFSET(diff_mode), AV_OPT_TYPE_INT, {.i64=DIFF_MODE_NONE}, 0, NB_DIFF_MODE-1, FLAGS, "diff_mode" },
120  { "rectangle", "process smallest different rectangle", 0, AV_OPT_TYPE_CONST, {.i64=DIFF_MODE_RECTANGLE}, INT_MIN, INT_MAX, FLAGS, "diff_mode" },
121  { "new", "take new palette for each output frame", OFFSET(new), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
122  { "alpha_threshold", "set the alpha threshold for transparency", OFFSET(trans_thresh), AV_OPT_TYPE_INT, {.i64=128}, 0, 255, FLAGS },
123 
124  /* following are the debug options, not part of the official API */
125  { "debug_kdtree", "save Graphviz graph of the kdtree in specified file", OFFSET(dot_filename), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
126  { "color_search", "set reverse colormap color search method", OFFSET(color_search_method), AV_OPT_TYPE_INT, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, 0, NB_COLOR_SEARCHES-1, FLAGS, "search" },
127  { "nns_iterative", "iterative search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_ITERATIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
128  { "nns_recursive", "recursive search", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_NNS_RECURSIVE}, INT_MIN, INT_MAX, FLAGS, "search" },
129  { "bruteforce", "brute-force into the palette", 0, AV_OPT_TYPE_CONST, {.i64=COLOR_SEARCH_BRUTEFORCE}, INT_MIN, INT_MAX, FLAGS, "search" },
130  { "mean_err", "compute and print mean error", OFFSET(calc_mean_err), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
131  { "debug_accuracy", "test color search accuracy", OFFSET(debug_accuracy), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
132  { NULL }
133 };
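/* A filter string combining the options above could look like (values are
 * only illustrative): "paletteuse=dither=bayer:bayer_scale=3:diff_mode=rectangle". */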
134 
135 AVFILTER_DEFINE_CLASS(paletteuse);
136 
137 static int load_apply_palette(FFFrameSync *fs);
138 
139 static int query_formats(AVFilterContext *ctx)
140 {
141  static const enum AVPixelFormat in_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
142  static const enum AVPixelFormat inpal_fmts[] = {AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE};
143  static const enum AVPixelFormat out_fmts[] = {AV_PIX_FMT_PAL8, AV_PIX_FMT_NONE};
144  int ret;
145  if ((ret = ff_formats_ref(ff_make_format_list(in_fmts),
146  &ctx->inputs[0]->out_formats)) < 0 ||
147  (ret = ff_formats_ref(ff_make_format_list(inpal_fmts),
148  &ctx->inputs[1]->out_formats)) < 0 ||
149  (ret = ff_formats_ref(ff_make_format_list(out_fmts),
150  &ctx->outputs[0]->in_formats)) < 0)
151  return ret;
152  return 0;
153 }
154 
155 static av_always_inline int dither_color(uint32_t px, int er, int eg, int eb, int scale, int shift)
156 {
157  return av_clip_uint8( px >> 24 ) << 24
158  | av_clip_uint8((px >> 16 & 0xff) + ((er * scale) / (1<<shift))) << 16
159  | av_clip_uint8((px >> 8 & 0xff) + ((eg * scale) / (1<<shift))) << 8
160  | av_clip_uint8((px & 0xff) + ((eb * scale) / (1<<shift)));
161 }
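/*
 * The propagated error is weighted as err * scale / 2^shift, so the callers
 * below can express the classic diffusion kernels with integer arithmetic
 * only; e.g. the Floyd-Steinberg 7/16 weight becomes scale=7, shift=4.
 */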
162 
163 static av_always_inline int diff(const uint8_t *c1, const uint8_t *c2, const int trans_thresh)
164 {
165  // XXX: try L*a*b with CIE76 (dL*dL + da*da + db*db)
166  const int dr = c1[1] - c2[1];
167  const int dg = c1[2] - c2[2];
168  const int db = c1[3] - c2[3];
169 
170  if (c1[0] < trans_thresh && c2[0] < trans_thresh) {
171  return 0;
172  } else if (c1[0] >= trans_thresh && c2[0] >= trans_thresh) {
173  return dr*dr + dg*dg + db*db;
174  } else {
175  return 255*255 + 255*255 + 255*255;
176  }
177 }
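/*
 * Within the opaque class the metric above is the plain squared Euclidean
 * distance in RGB; two pixels below the alpha threshold always match with
 * distance 0, and a transparent/opaque pair gets the maximum possible
 * distance (3 * 255^2), so transparency never mixes with real colors.
 */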
178 
179 static av_always_inline uint8_t colormap_nearest_bruteforce(const uint32_t *palette, const uint8_t *argb, const int trans_thresh)
180 {
181  int i, pal_id = -1, min_dist = INT_MAX;
182 
183  for (i = 0; i < AVPALETTE_COUNT; i++) {
184  const uint32_t c = palette[i];
185 
186  if (c >> 24 >= trans_thresh) { // ignore transparent entry
187  const uint8_t palargb[] = {
188  palette[i]>>24 & 0xff,
189  palette[i]>>16 & 0xff,
190  palette[i]>> 8 & 0xff,
191  palette[i] & 0xff,
192  };
193  const int d = diff(palargb, argb, trans_thresh);
194  if (d < min_dist) {
195  pal_id = i;
196  min_dist = d;
197  }
198  }
199  }
200  return pal_id;
201 }
202 
203 /* Recursive form, simpler but a bit slower. Kept for reference. */
204 struct nearest_color {
205  int node_pos;
206  int dist_sqd;
207 };
208 
209 static void colormap_nearest_node(const struct color_node *map,
210  const int node_pos,
211  const uint8_t *target,
212  const int trans_thresh,
213  struct nearest_color *nearest)
214 {
215  const struct color_node *kd = map + node_pos;
216  const int s = kd->split;
217  int dx, nearer_kd_id, further_kd_id;
218  const uint8_t *current = kd->val;
219  const int current_to_target = diff(target, current, trans_thresh);
220 
221  if (current_to_target < nearest->dist_sqd) {
222  nearest->node_pos = node_pos;
223  nearest->dist_sqd = current_to_target;
224  }
225 
226  if (kd->left_id != -1 || kd->right_id != -1) {
227  dx = target[s] - current[s];
228 
229  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
230  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
231 
232  if (nearer_kd_id != -1)
233  colormap_nearest_node(map, nearer_kd_id, target, trans_thresh, nearest);
234 
235  if (further_kd_id != -1 && dx*dx < nearest->dist_sqd)
236  colormap_nearest_node(map, further_kd_id, target, trans_thresh, nearest);
237  }
238 }
239 
240 static av_always_inline uint8_t colormap_nearest_recursive(const struct color_node *node, const uint8_t *rgb, const int trans_thresh)
241 {
242  struct nearest_color res = {.dist_sqd = INT_MAX, .node_pos = -1};
243  colormap_nearest_node(node, 0, rgb, trans_thresh, &res);
244  return node[res.node_pos].palette_id;
245 }
246 
247 struct stack_node {
248  int color_id;
249  int dx2;
250 };
251 
252 static av_always_inline uint8_t colormap_nearest_iterative(const struct color_node *root, const uint8_t *target, const int trans_thresh)
253 {
254  int pos = 0, best_node_id = -1, best_dist = INT_MAX, cur_color_id = 0;
255  struct stack_node nodes[16];
256  struct stack_node *node = &nodes[0];
257 
258  for (;;) {
259 
260  const struct color_node *kd = &root[cur_color_id];
261  const uint8_t *current = kd->val;
262  const int current_to_target = diff(target, current, trans_thresh);
263 
264  /* Compare current color node to the target and update our best node if
265  * it's actually better. */
266  if (current_to_target < best_dist) {
267  best_node_id = cur_color_id;
268  if (!current_to_target)
269  goto end; // exact match, we can return immediately
270  best_dist = current_to_target;
271  }
272 
273  /* Check if it's not a leaf */
274  if (kd->left_id != -1 || kd->right_id != -1) {
275  const int split = kd->split;
276  const int dx = target[split] - current[split];
277  int nearer_kd_id, further_kd_id;
278 
279  /* Define which side is the most interesting. */
280  if (dx <= 0) nearer_kd_id = kd->left_id, further_kd_id = kd->right_id;
281  else nearer_kd_id = kd->right_id, further_kd_id = kd->left_id;
282 
283  if (nearer_kd_id != -1) {
284  if (further_kd_id != -1) {
285  /* Here, both paths are defined, so we push a state for
286  * when we are going back. */
287  node->color_id = further_kd_id;
288  node->dx2 = dx*dx;
289  pos++;
290  node++;
291  }
292  /* We can now update current color with the most probable path
293  * (no need to create a state since there is nothing to save
294  * anymore). */
295  cur_color_id = nearer_kd_id;
296  continue;
297  } else if (dx*dx < best_dist) {
298  /* The nearest path isn't available, so there is only one path
299  * possible and it's the least probable. We enter it only if the
300  * distance from the current point to the hyper rectangle is
301  * less than our best distance. */
302  cur_color_id = further_kd_id;
303  continue;
304  }
305  }
306 
307  /* Unstack as much as we can, typically as long as the least probable
308  * branches aren't actually probable. */
309  do {
310  if (--pos < 0)
311  goto end;
312  node--;
313  } while (node->dx2 >= best_dist);
314 
315  /* We got a node where the least probable branch might actually contain
316  * a relevant color. */
317  cur_color_id = node->color_id;
318  }
319 
320 end:
321  return root[best_node_id].palette_id;
322 }
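/*
 * Both nearest-neighbor searches above follow the same kd-tree descent:
 * go down the child on the target's side of the split, then revisit the
 * other side only if the splitting plane is closer than the best match so
 * far (dx*dx < best_dist). The iterative variant simply keeps the pending
 * "far" branches on a small explicit stack instead of the call stack.
 */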
323 
324 #define COLORMAP_NEAREST(search, palette, root, target, trans_thresh) \
325  search == COLOR_SEARCH_NNS_ITERATIVE ? colormap_nearest_iterative(root, target, trans_thresh) : \
326  search == COLOR_SEARCH_NNS_RECURSIVE ? colormap_nearest_recursive(root, target, trans_thresh) : \
327  colormap_nearest_bruteforce(palette, target, trans_thresh)
328 
329 /**
330  * Check if the requested color is in the cache already. If not, find it in the
331  * color tree and cache it.
332  * Note: a, r, g, and b are the components of color, but are passed as well to avoid
333  * recomputing them (they are generally computed by the caller for other uses).
334  */
335 static av_always_inline int color_get(PaletteUseContext *s, uint32_t color,
336  uint8_t a, uint8_t r, uint8_t g, uint8_t b,
337  const enum color_search_method search_method)
338 {
339  int i;
340  const uint8_t argb_elts[] = {a, r, g, b};
341  const uint8_t rhash = r & ((1<<NBITS)-1);
342  const uint8_t ghash = g & ((1<<NBITS)-1);
343  const uint8_t bhash = b & ((1<<NBITS)-1);
344  const unsigned hash = rhash<<(NBITS*2) | ghash<<NBITS | bhash;
345  struct cache_node *node = &s->cache[hash];
346  struct cached_color *e;
347 
348  // first, check for transparency
349  if (a < s->trans_thresh && s->transparency_index >= 0) {
350  return s->transparency_index;
351  }
352 
353  for (i = 0; i < node->nb_entries; i++) {
354  e = &node->entries[i];
355  if (e->color == color)
356  return e->pal_entry;
357  }
358 
359  e = av_dynarray2_add((void**)&node->entries, &node->nb_entries,
360  sizeof(*node->entries), NULL);
361  if (!e)
362  return AVERROR(ENOMEM);
363  e->color = color;
364  e->pal_entry = COLORMAP_NEAREST(search_method, s->palette, s->map, argb_elts, s->trans_thresh);
365 
366  return e->pal_entry;
367 }
368 
369 static av_always_inline int get_dst_color_err(PaletteUseContext *s,
370  uint32_t c, int *er, int *eg, int *eb,
371  const enum color_search_method search_method)
372 {
373  const uint8_t a = c >> 24 & 0xff;
374  const uint8_t r = c >> 16 & 0xff;
375  const uint8_t g = c >> 8 & 0xff;
376  const uint8_t b = c & 0xff;
377  uint32_t dstc;
378  const int dstx = color_get(s, c, a, r, g, b, search_method);
379  if (dstx < 0)
380  return dstx;
381  dstc = s->palette[dstx];
382  *er = r - (dstc >> 16 & 0xff);
383  *eg = g - (dstc >> 8 & 0xff);
384  *eb = b - (dstc & 0xff);
385  return dstx;
386 }
387 
388 static av_always_inline int set_frame(PaletteUseContext *s, AVFrame *out, AVFrame *in,
389  int x_start, int y_start, int w, int h,
390  enum dithering_mode dither,
391  const enum color_search_method search_method)
392 {
393  int x, y;
394  const int src_linesize = in ->linesize[0] >> 2;
395  const int dst_linesize = out->linesize[0];
396  uint32_t *src = ((uint32_t *)in ->data[0]) + y_start*src_linesize;
397  uint8_t *dst = out->data[0] + y_start*dst_linesize;
398 
399  w += x_start;
400  h += y_start;
401 
402  for (y = y_start; y < h; y++) {
403  for (x = x_start; x < w; x++) {
404  int er, eg, eb;
405 
406  if (dither == DITHERING_BAYER) {
407  const int d = s->ordered_dither[(y & 7)<<3 | (x & 7)];
408  const uint8_t a8 = src[x] >> 24 & 0xff;
409  const uint8_t r8 = src[x] >> 16 & 0xff;
410  const uint8_t g8 = src[x] >> 8 & 0xff;
411  const uint8_t b8 = src[x] & 0xff;
412  const uint8_t r = av_clip_uint8(r8 + d);
413  const uint8_t g = av_clip_uint8(g8 + d);
414  const uint8_t b = av_clip_uint8(b8 + d);
415  const int color = color_get(s, src[x], a8, r, g, b, search_method);
416 
417  if (color < 0)
418  return color;
419  dst[x] = color;
420 
421  } else if (dither == DITHERING_HECKBERT) {
422  const int right = x < w - 1, down = y < h - 1;
423  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
424 
425  if (color < 0)
426  return color;
427  dst[x] = color;
428 
429  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 3, 3);
430  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 3, 3);
431  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 2, 3);
432 
433  } else if (dither == DITHERING_FLOYD_STEINBERG) {
434  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
435  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
436 
437  if (color < 0)
438  return color;
439  dst[x] = color;
440 
441  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 7, 4);
442  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 3, 4);
443  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 5, 4);
444  if (right && down) src[src_linesize + x + 1] = dither_color(src[src_linesize + x + 1], er, eg, eb, 1, 4);
445 
446  } else if (dither == DITHERING_SIERRA2) {
447  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
448  const int right2 = x < w - 2, left2 = x > x_start + 1;
449  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
450 
451  if (color < 0)
452  return color;
453  dst[x] = color;
454 
455  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 4, 4);
456  if (right2) src[ x + 2] = dither_color(src[ x + 2], er, eg, eb, 3, 4);
457 
458  if (down) {
459  if (left2) src[ src_linesize + x - 2] = dither_color(src[ src_linesize + x - 2], er, eg, eb, 1, 4);
460  if (left) src[ src_linesize + x - 1] = dither_color(src[ src_linesize + x - 1], er, eg, eb, 2, 4);
461  if (1) src[ src_linesize + x ] = dither_color(src[ src_linesize + x ], er, eg, eb, 3, 4);
462  if (right) src[ src_linesize + x + 1] = dither_color(src[ src_linesize + x + 1], er, eg, eb, 2, 4);
463  if (right2) src[ src_linesize + x + 2] = dither_color(src[ src_linesize + x + 2], er, eg, eb, 1, 4);
464  }
465 
466  } else if (dither == DITHERING_SIERRA2_4A) {
467  const int right = x < w - 1, down = y < h - 1, left = x > x_start;
468  const int color = get_dst_color_err(s, src[x], &er, &eg, &eb, search_method);
469 
470  if (color < 0)
471  return color;
472  dst[x] = color;
473 
474  if (right) src[ x + 1] = dither_color(src[ x + 1], er, eg, eb, 2, 2);
475  if (left && down) src[src_linesize + x - 1] = dither_color(src[src_linesize + x - 1], er, eg, eb, 1, 2);
476  if ( down) src[src_linesize + x ] = dither_color(src[src_linesize + x ], er, eg, eb, 1, 2);
477 
478  } else {
479  const uint8_t a = src[x] >> 24 & 0xff;
480  const uint8_t r = src[x] >> 16 & 0xff;
481  const uint8_t g = src[x] >> 8 & 0xff;
482  const uint8_t b = src[x] & 0xff;
483  const int color = color_get(s, src[x], a, r, g, b, search_method);
484 
485  if (color < 0)
486  return color;
487  dst[x] = color;
488  }
489  }
490  src += src_linesize;
491  dst += dst_linesize;
492  }
493  return 0;
494 }
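/*
 * Note that every error-diffusion mode above writes the propagated error
 * back into the source pixels (src[...] = dither_color(...)), i.e. the
 * input frame is modified in place; this is why load_apply_palette() asks
 * for a writable master frame before calling into here.
 */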
495 
496 #define INDENT 4
497 static void disp_node(AVBPrint *buf,
498  const struct color_node *map,
499  int parent_id, int node_id,
500  int depth)
501 {
502  const struct color_node *node = &map[node_id];
503  const uint32_t fontcolor = node->val[1] > 0x50 &&
504  node->val[2] > 0x50 &&
505  node->val[3] > 0x50 ? 0 : 0xffffff;
506  const int rgb_comp = node->split - 1;
507  av_bprintf(buf, "%*cnode%d ["
508  "label=\"%c%02X%c%02X%c%02X%c\" "
509  "fillcolor=\"#%02x%02x%02x\" "
510  "fontcolor=\"#%06"PRIX32"\"]\n",
511  depth*INDENT, ' ', node->palette_id,
512  "[ "[rgb_comp], node->val[1],
513  "][ "[rgb_comp], node->val[2],
514  " ]["[rgb_comp], node->val[3],
515  " ]"[rgb_comp],
516  node->val[1], node->val[2], node->val[3],
517  fontcolor);
518  if (parent_id != -1)
519  av_bprintf(buf, "%*cnode%d -> node%d\n", depth*INDENT, ' ',
520  map[parent_id].palette_id, node->palette_id);
521  if (node->left_id != -1) disp_node(buf, map, node_id, node->left_id, depth + 1);
522  if (node->right_id != -1) disp_node(buf, map, node_id, node->right_id, depth + 1);
523 }
524 
525 // debug_kdtree=kdtree.dot -> dot -Tpng kdtree.dot > kdtree.png
526 static int disp_tree(const struct color_node *node, const char *fname)
527 {
528  AVBPrint buf;
529  FILE *f = av_fopen_utf8(fname, "w");
530 
531  if (!f) {
532  int ret = AVERROR(errno);
533  av_log(NULL, AV_LOG_ERROR, "Cannot open file '%s' for writing: %s\n",
534  fname, av_err2str(ret));
535  return ret;
536  }
537 
538  av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED);
539 
540  av_bprintf(&buf, "digraph {\n");
541  av_bprintf(&buf, " node [style=filled fontsize=10 shape=box]\n");
542  disp_node(&buf, node, -1, 0, 0);
543  av_bprintf(&buf, "}\n");
544 
545  fwrite(buf.str, 1, buf.len, f);
546  fclose(f);
547  av_bprint_finalize(&buf, NULL);
548  return 0;
549 }
550 
551 static int debug_accuracy(const struct color_node *node, const uint32_t *palette, const int trans_thresh,
552  const enum color_search_method search_method)
553 {
554  int r, g, b, ret = 0;
555 
556  for (r = 0; r < 256; r++) {
557  for (g = 0; g < 256; g++) {
558  for (b = 0; b < 256; b++) {
559  const uint8_t argb[] = {0xff, r, g, b};
560  const int r1 = COLORMAP_NEAREST(search_method, palette, node, argb, trans_thresh);
561  const int r2 = colormap_nearest_bruteforce(palette, argb, trans_thresh);
562  if (r1 != r2) {
563  const uint32_t c1 = palette[r1];
564  const uint32_t c2 = palette[r2];
565  const uint8_t palargb1[] = { 0xff, c1>>16 & 0xff, c1>> 8 & 0xff, c1 & 0xff };
566  const uint8_t palargb2[] = { 0xff, c2>>16 & 0xff, c2>> 8 & 0xff, c2 & 0xff };
567  const int d1 = diff(palargb1, argb, trans_thresh);
568  const int d2 = diff(palargb2, argb, trans_thresh);
569  if (d1 != d2) {
570  av_log(NULL, AV_LOG_ERROR,
571  "/!\\ %02X%02X%02X: %d ! %d (%06"PRIX32" ! %06"PRIX32") / dist: %d ! %d\n",
572  r, g, b, r1, r2, c1 & 0xffffff, c2 & 0xffffff, d1, d2);
573  ret = 1;
574  }
575  }
576  }
577  }
578  }
579  return ret;
580 }
581 
582 struct color {
583  uint32_t value;
584  uint8_t pal_id;
585 };
586 
587 struct color_rect {
588  uint8_t min[3];
589  uint8_t max[3];
590 };
591 
592 typedef int (*cmp_func)(const void *, const void *);
593 
594 #define DECLARE_CMP_FUNC(name, pos) \
595 static int cmp_##name(const void *pa, const void *pb) \
596 { \
597  const struct color *a = pa; \
598  const struct color *b = pb; \
599  return (a->value >> (8 * (3 - (pos))) & 0xff) \
600  - (b->value >> (8 * (3 - (pos))) & 0xff); \
601 }
602 
603 DECLARE_CMP_FUNC(a, 0)
604 DECLARE_CMP_FUNC(r, 1)
605 DECLARE_CMP_FUNC(g, 2)
606 DECLARE_CMP_FUNC(b, 3)
607 
608 static const cmp_func cmp_funcs[] = {cmp_a, cmp_r, cmp_g, cmp_b};
609 
610 static int get_next_color(const uint8_t *color_used, const uint32_t *palette,
611  const int trans_thresh,
612  int *component, const struct color_rect *box)
613 {
614  int wr, wg, wb;
615  int i, longest = 0;
616  unsigned nb_color = 0;
617  struct color_rect ranges;
618  struct color tmp_pal[256];
619  cmp_func cmpf;
620 
621  ranges.min[0] = ranges.min[1] = ranges.min[2] = 0xff;
622  ranges.max[0] = ranges.max[1] = ranges.max[2] = 0x00;
623 
624  for (i = 0; i < AVPALETTE_COUNT; i++) {
625  const uint32_t c = palette[i];
626  const uint8_t a = c >> 24 & 0xff;
627  const uint8_t r = c >> 16 & 0xff;
628  const uint8_t g = c >> 8 & 0xff;
629  const uint8_t b = c & 0xff;
630 
631  if (a < trans_thresh) {
632  continue;
633  }
634 
635  if (color_used[i] || (a != 0xff) ||
636  r < box->min[0] || g < box->min[1] || b < box->min[2] ||
637  r > box->max[0] || g > box->max[1] || b > box->max[2])
638  continue;
639 
640  if (r < ranges.min[0]) ranges.min[0] = r;
641  if (g < ranges.min[1]) ranges.min[1] = g;
642  if (b < ranges.min[2]) ranges.min[2] = b;
643 
644  if (r > ranges.max[0]) ranges.max[0] = r;
645  if (g > ranges.max[1]) ranges.max[1] = g;
646  if (b > ranges.max[2]) ranges.max[2] = b;
647 
648  tmp_pal[nb_color].value = c;
649  tmp_pal[nb_color].pal_id = i;
650 
651  nb_color++;
652  }
653 
654  if (!nb_color)
655  return -1;
656 
657  /* define longest axis that will be the split component */
658  wr = ranges.max[0] - ranges.min[0];
659  wg = ranges.max[1] - ranges.min[1];
660  wb = ranges.max[2] - ranges.min[2];
661  if (wr >= wg && wr >= wb) longest = 1;
662  if (wg >= wr && wg >= wb) longest = 2;
663  if (wb >= wr && wb >= wg) longest = 3;
664  cmpf = cmp_funcs[longest];
665  *component = longest;
666 
667  /* sort along this axis to get median */
668  AV_QSORT(tmp_pal, nb_color, struct color, cmpf);
669 
670  return tmp_pal[nb_color >> 1].pal_id;
671 }
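/*
 * get_next_color() is the kd-tree median split: gather the not yet inserted
 * palette entries that fall inside the current box, pick the axis (R, G or
 * B) with the widest range, sort along it and return the median entry;
 * colormap_insert() below then recurses on the two resulting half-boxes.
 */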
672 
673 static int colormap_insert(struct color_node *map,
674  uint8_t *color_used,
675  int *nb_used,
676  const uint32_t *palette,
677  const int trans_thresh,
678  const struct color_rect *box)
679 {
680  uint32_t c;
681  int component, cur_id;
682  int node_left_id = -1, node_right_id = -1;
683  struct color_node *node;
684  struct color_rect box1, box2;
685  const int pal_id = get_next_color(color_used, palette, trans_thresh, &component, box);
686 
687  if (pal_id < 0)
688  return -1;
689 
690  /* create new node with that color */
691  cur_id = (*nb_used)++;
692  c = palette[pal_id];
693  node = &map[cur_id];
694  node->split = component;
695  node->palette_id = pal_id;
696  node->val[0] = c>>24 & 0xff;
697  node->val[1] = c>>16 & 0xff;
698  node->val[2] = c>> 8 & 0xff;
699  node->val[3] = c & 0xff;
700 
701  color_used[pal_id] = 1;
702 
703  /* get the two boxes this node creates */
704  box1 = box2 = *box;
705  box1.max[component-1] = node->val[component];
706  box2.min[component-1] = node->val[component] + 1;
707 
708  node_left_id = colormap_insert(map, color_used, nb_used, palette, trans_thresh, &box1);
709 
710  if (box2.min[component-1] <= box2.max[component-1])
711  node_right_id = colormap_insert(map, color_used, nb_used, palette, trans_thresh, &box2);
712 
713  node->left_id = node_left_id;
714  node->right_id = node_right_id;
715 
716  return cur_id;
717 }
718 
719 static int cmp_pal_entry(const void *a, const void *b)
720 {
721  const int c1 = *(const uint32_t *)a & 0xffffff;
722  const int c2 = *(const uint32_t *)b & 0xffffff;
723  return c1 - c2;
724 }
725 
726 static void load_colormap(PaletteUseContext *s)
727 {
728  int i, nb_used = 0;
729  uint8_t color_used[AVPALETTE_COUNT] = {0};
730  uint32_t last_color = 0;
731  struct color_rect box;
732 
733  /* disable transparent colors and dups */
734  qsort(s->palette, AVPALETTE_COUNT, sizeof(*s->palette), cmp_pal_entry);
735  // update transparency index:
736  if (s->transparency_index >= 0) {
737  for (i = 0; i < AVPALETTE_COUNT; i++) {
738  if ((s->palette[i]>>24 & 0xff) == 0) {
739  s->transparency_index = i; // we are assuming at most one transparent color in palette
740  break;
741  }
742  }
743  }
744 
745  for (i = 0; i < AVPALETTE_COUNT; i++) {
746  const uint32_t c = s->palette[i];
747  if (i != 0 && c == last_color) {
748  color_used[i] = 1;
749  continue;
750  }
751  last_color = c;
752  if (c >> 24 < s->trans_thresh) {
753  color_used[i] = 1; // ignore transparent color(s)
754  continue;
755  }
756  }
757 
758  box.min[0] = box.min[1] = box.min[2] = 0x00;
759  box.max[0] = box.max[1] = box.max[2] = 0xff;
760 
761  colormap_insert(s->map, color_used, &nb_used, s->palette, s->trans_thresh, &box);
762 
763  if (s->dot_filename)
764  disp_tree(s->map, s->dot_filename);
765 
766  if (s->debug_accuracy) {
767  if (!debug_accuracy(s->map, s->palette, s->trans_thresh, s->color_search_method))
768  av_log(NULL, AV_LOG_INFO, "Accuracy check passed\n");
769  }
770 }
771 
772 static void debug_mean_error(PaletteUseContext *s, const AVFrame *in1,
773  const AVFrame *in2, int frame_count)
774 {
775  int x, y;
776  const uint32_t *palette = s->palette;
777  uint32_t *src1 = (uint32_t *)in1->data[0];
778  uint8_t *src2 = in2->data[0];
779  const int src1_linesize = in1->linesize[0] >> 2;
780  const int src2_linesize = in2->linesize[0];
781  const float div = in1->width * in1->height * 3;
782  unsigned mean_err = 0;
783 
784  for (y = 0; y < in1->height; y++) {
785  for (x = 0; x < in1->width; x++) {
786  const uint32_t c1 = src1[x];
787  const uint32_t c2 = palette[src2[x]];
788  const uint8_t argb1[] = {0xff, c1 >> 16 & 0xff, c1 >> 8 & 0xff, c1 & 0xff};
789  const uint8_t argb2[] = {0xff, c2 >> 16 & 0xff, c2 >> 8 & 0xff, c2 & 0xff};
790  mean_err += diff(argb1, argb2, s->trans_thresh);
791  }
792  src1 += src1_linesize;
793  src2 += src2_linesize;
794  }
795 
796  s->total_mean_err += mean_err;
797 
798  av_log(NULL, AV_LOG_INFO, "MEP:%.3f TotalMEP:%.3f\n",
799  mean_err / div, s->total_mean_err / (div * frame_count));
800 }
801 
802 static void set_processing_window(enum diff_mode diff_mode,
803  const AVFrame *prv_src, const AVFrame *cur_src,
804  const AVFrame *prv_dst, AVFrame *cur_dst,
805  int *xp, int *yp, int *wp, int *hp)
806 {
807  int x_start = 0, y_start = 0;
808  int width = cur_src->width;
809  int height = cur_src->height;
810 
811  if (prv_src->data[0] && diff_mode == DIFF_MODE_RECTANGLE) {
812  int y;
813  int x_end = cur_src->width - 1,
814  y_end = cur_src->height - 1;
815  const uint32_t *prv_srcp = (const uint32_t *)prv_src->data[0];
816  const uint32_t *cur_srcp = (const uint32_t *)cur_src->data[0];
817  const uint8_t *prv_dstp = prv_dst->data[0];
818  uint8_t *cur_dstp = cur_dst->data[0];
819 
820  const int prv_src_linesize = prv_src->linesize[0] >> 2;
821  const int cur_src_linesize = cur_src->linesize[0] >> 2;
822  const int prv_dst_linesize = prv_dst->linesize[0];
823  const int cur_dst_linesize = cur_dst->linesize[0];
824 
825  /* skip common lines */
826  while (y_start < y_end && !memcmp(prv_srcp + y_start*prv_src_linesize,
827  cur_srcp + y_start*cur_src_linesize,
828  cur_src->width * 4)) {
829  memcpy(cur_dstp + y_start*cur_dst_linesize,
830  prv_dstp + y_start*prv_dst_linesize,
831  cur_dst->width);
832  y_start++;
833  }
834  while (y_end > y_start && !memcmp(prv_srcp + y_end*prv_src_linesize,
835  cur_srcp + y_end*cur_src_linesize,
836  cur_src->width * 4)) {
837  memcpy(cur_dstp + y_end*cur_dst_linesize,
838  prv_dstp + y_end*prv_dst_linesize,
839  cur_dst->width);
840  y_end--;
841  }
842 
843  height = y_end + 1 - y_start;
844 
845  /* skip common columns */
846  while (x_start < x_end) {
847  int same_column = 1;
848  for (y = y_start; y <= y_end; y++) {
849  if (prv_srcp[y*prv_src_linesize + x_start] != cur_srcp[y*cur_src_linesize + x_start]) {
850  same_column = 0;
851  break;
852  }
853  }
854  if (!same_column)
855  break;
856  x_start++;
857  }
858  while (x_end > x_start) {
859  int same_column = 1;
860  for (y = y_start; y <= y_end; y++) {
861  if (prv_srcp[y*prv_src_linesize + x_end] != cur_srcp[y*cur_src_linesize + x_end]) {
862  same_column = 0;
863  break;
864  }
865  }
866  if (!same_column)
867  break;
868  x_end--;
869  }
870  width = x_end + 1 - x_start;
871 
872  if (x_start) {
873  for (y = y_start; y <= y_end; y++)
874  memcpy(cur_dstp + y*cur_dst_linesize,
875  prv_dstp + y*prv_dst_linesize, x_start);
876  }
877  if (x_end != cur_src->width - 1) {
878  const int copy_len = cur_src->width - 1 - x_end;
879  for (y = y_start; y <= y_end; y++)
880  memcpy(cur_dstp + y*cur_dst_linesize + x_end + 1,
881  prv_dstp + y*prv_dst_linesize + x_end + 1,
882  copy_len);
883  }
884  }
885  *xp = x_start;
886  *yp = y_start;
887  *wp = width;
888  *hp = height;
889 }
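/*
 * With diff_mode=rectangle the function above shrinks the work area to the
 * bounding rectangle of the pixels that changed since the previous input
 * frame: unchanged rows and columns are copied from the previous output and
 * only the remaining window is re-quantized, which helps formats like GIF
 * that can encode a cropped/offset sub-rectangle.
 */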
890 
891 static int apply_palette(AVFilterLink *inlink, AVFrame *in, AVFrame **outf)
892 {
893  int x, y, w, h, ret;
894  AVFilterContext *ctx = inlink->dst;
895  PaletteUseContext *s = ctx->priv;
896  AVFilterLink *outlink = inlink->dst->outputs[0];
897 
898  AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
899  if (!out) {
900  *outf = NULL;
901  return AVERROR(ENOMEM);
902  }
903  av_frame_copy_props(out, in);
904 
905  set_processing_window(s->diff_mode, s->last_in, in,
906  s->last_out, out, &x, &y, &w, &h);
907  av_frame_unref(s->last_in);
908  av_frame_unref(s->last_out);
909  if ((ret = av_frame_ref(s->last_in, in)) < 0 ||
910  (ret = av_frame_ref(s->last_out, out)) < 0 ||
911  (ret = av_frame_make_writable(s->last_in)) < 0) {
912  av_frame_free(&out);
913  *outf = NULL;
914  return ret;
915  }
916 
917  ff_dlog(ctx, "%dx%d rect: (%d;%d) -> (%d,%d) [area:%dx%d]\n",
918  w, h, x, y, x+w, y+h, in->width, in->height);
919 
920  ret = s->set_frame(s, out, in, x, y, w, h);
921  if (ret < 0) {
922  av_frame_free(&out);
923  *outf = NULL;
924  return ret;
925  }
926  memcpy(out->data[1], s->palette, AVPALETTE_SIZE);
927  if (s->calc_mean_err)
928  debug_mean_error(s, in, out, inlink->frame_count_out);
929  *outf = out;
930  return 0;
931 }
932 
933 static int config_output(AVFilterLink *outlink)
934 {
935  int ret;
936  AVFilterContext *ctx = outlink->src;
937  PaletteUseContext *s = ctx->priv;
938 
939  ret = ff_framesync_init_dualinput(&s->fs, ctx);
940  if (ret < 0)
941  return ret;
942  s->fs.opt_repeatlast = 1; // only 1 frame in the palette
943  s->fs.in[1].before = s->fs.in[1].after = EXT_INFINITY;
944  s->fs.on_event = load_apply_palette;
945 
946  outlink->w = ctx->inputs[0]->w;
947  outlink->h = ctx->inputs[0]->h;
948 
949  outlink->time_base = ctx->inputs[0]->time_base;
950  if ((ret = ff_framesync_configure(&s->fs)) < 0)
951  return ret;
952  return 0;
953 }
954 
955 static int config_input_palette(AVFilterLink *inlink)
956 {
957  AVFilterContext *ctx = inlink->dst;
958 
959  if (inlink->w * inlink->h != AVPALETTE_COUNT) {
960  av_log(ctx, AV_LOG_ERROR,
961  "Palette input must contain exactly %d pixels. "
962  "Specified input has %dx%d=%d pixels\n",
963  AVPALETTE_COUNT, inlink->w, inlink->h,
964  inlink->w * inlink->h);
965  return AVERROR(EINVAL);
966  }
967  return 0;
968 }
969 
970 static void load_palette(PaletteUseContext *s, const AVFrame *palette_frame)
971 {
972  int i, x, y;
973  const uint32_t *p = (const uint32_t *)palette_frame->data[0];
974  const int p_linesize = palette_frame->linesize[0] >> 2;
975 
976  s->transparency_index = -1;
977 
978  if (s->new) {
979  memset(s->palette, 0, sizeof(s->palette));
980  memset(s->map, 0, sizeof(s->map));
981  for (i = 0; i < CACHE_SIZE; i++)
982  av_freep(&s->cache[i].entries);
983  memset(s->cache, 0, sizeof(s->cache));
984  }
985 
986  i = 0;
987  for (y = 0; y < palette_frame->height; y++) {
988  for (x = 0; x < palette_frame->width; x++) {
989  s->palette[i] = p[x];
990  if (p[x]>>24 < s->trans_thresh) {
991  s->transparency_index = i; // we are assuming at most one transparent color in palette
992  }
993  i++;
994  }
995  p += p_linesize;
996  }
997 
998  load_colormap(s);
999 
1000  if (!s->new)
1001  s->palette_loaded = 1;
1002 }
1003 
1004 static int load_apply_palette(FFFrameSync *fs)
1005 {
1006  AVFilterContext *ctx = fs->parent;
1007  AVFilterLink *inlink = ctx->inputs[0];
1008  PaletteUseContext *s = ctx->priv;
1009  AVFrame *master, *second, *out = NULL;
1010  int ret;
1011 
1012  // writable for error diffusion dithering
1013  ret = ff_framesync_dualinput_get_writable(fs, &master, &second);
1014  if (ret < 0)
1015  return ret;
1016  if (!master || !second) {
1017  av_frame_free(&master);
1018  return AVERROR_BUG;
1019  }
1020  if (!s->palette_loaded) {
1021  load_palette(s, second);
1022  }
1023  ret = apply_palette(inlink, master, &out);
1024  av_frame_free(&master);
1025  if (ret < 0)
1026  return ret;
1027  return ff_filter_frame(ctx->outputs[0], out);
1028 }
1029 
1030 #define DEFINE_SET_FRAME(color_search, name, value) \
1031 static int set_frame_##name(PaletteUseContext *s, AVFrame *out, AVFrame *in, \
1032  int x_start, int y_start, int w, int h) \
1033 { \
1034  return set_frame(s, out, in, x_start, y_start, w, h, value, color_search); \
1035 }
1036 
1037 #define DEFINE_SET_FRAME_COLOR_SEARCH(color_search, color_search_macro) \
1038  DEFINE_SET_FRAME(color_search_macro, color_search##_##none, DITHERING_NONE) \
1039  DEFINE_SET_FRAME(color_search_macro, color_search##_##bayer, DITHERING_BAYER) \
1040  DEFINE_SET_FRAME(color_search_macro, color_search##_##heckbert, DITHERING_HECKBERT) \
1041  DEFINE_SET_FRAME(color_search_macro, color_search##_##floyd_steinberg, DITHERING_FLOYD_STEINBERG) \
1042  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2, DITHERING_SIERRA2) \
1043  DEFINE_SET_FRAME(color_search_macro, color_search##_##sierra2_4a, DITHERING_SIERRA2_4A) \
1044 
1045 DEFINE_SET_FRAME_COLOR_SEARCH(nns_iterative, COLOR_SEARCH_NNS_ITERATIVE)
1046 DEFINE_SET_FRAME_COLOR_SEARCH(nns_recursive, COLOR_SEARCH_NNS_RECURSIVE)
1047 DEFINE_SET_FRAME_COLOR_SEARCH(bruteforce, COLOR_SEARCH_BRUTEFORCE)
1048 
1049 #define DITHERING_ENTRIES(color_search) { \
1050  set_frame_##color_search##_none, \
1051  set_frame_##color_search##_bayer, \
1052  set_frame_##color_search##_heckbert, \
1053  set_frame_##color_search##_floyd_steinberg, \
1054  set_frame_##color_search##_sierra2, \
1055  set_frame_##color_search##_sierra2_4a, \
1056 }
1057 
1058 static const set_frame_func set_frame_lut[NB_COLOR_SEARCHES][NB_DITHERING] = {
1059  DITHERING_ENTRIES(nns_iterative),
1060  DITHERING_ENTRIES(nns_recursive),
1061  DITHERING_ENTRIES(bruteforce),
1062 };
1063 
1064 static int dither_value(int p)
1065 {
1066  const int q = p ^ (p >> 3);
1067  return (p & 4) >> 2 | (q & 4) >> 1 \
1068  | (p & 2) << 1 | (q & 2) << 2 \
1069  | (p & 1) << 4 | (q & 1) << 5;
1070 }
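/*
 * dither_value() maps the packed index p = y<<3 | x to the matching entry
 * of the 8x8 ordered (Bayer) matrix by interleaving, in reversed bit order,
 * the bits of x and x^y; init() below then right-shifts the result by
 * bayer_scale and recenters it around zero before it is added to each RGB
 * component in set_frame().
 */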
1071 
1072 static av_cold int init(AVFilterContext *ctx)
1073 {
1074  PaletteUseContext *s = ctx->priv;
1075 
1076  s->last_in = av_frame_alloc();
1077  s->last_out = av_frame_alloc();
1078  if (!s->last_in || !s->last_out) {
1079  av_frame_free(&s->last_in);
1080  av_frame_free(&s->last_out);
1081  return AVERROR(ENOMEM);
1082  }
1083 
1084  s->set_frame = set_frame_lut[s->color_search_method][s->dither];
1085 
1086  if (s->dither == DITHERING_BAYER) {
1087  int i;
1088  const int delta = 1 << (5 - s->bayer_scale); // to avoid too much luma
1089 
1090  for (i = 0; i < FF_ARRAY_ELEMS(s->ordered_dither); i++)
1091  s->ordered_dither[i] = (dither_value(i) >> s->bayer_scale) - delta;
1092  }
1093 
1094  return 0;
1095 }
1096 
1097 static int activate(AVFilterContext *ctx)
1098 {
1099  PaletteUseContext *s = ctx->priv;
1100  return ff_framesync_activate(&s->fs);
1101 }
1102 
1103 static av_cold void uninit(AVFilterContext *ctx)
1104 {
1105  int i;
1106  PaletteUseContext *s = ctx->priv;
1107 
1108  ff_framesync_uninit(&s->fs);
1109  for (i = 0; i < CACHE_SIZE; i++)
1110  av_freep(&s->cache[i].entries);
1111  av_frame_free(&s->last_in);
1112  av_frame_free(&s->last_out);
1113 }
1114 
1115 static const AVFilterPad paletteuse_inputs[] = {
1116  {
1117  .name = "default",
1118  .type = AVMEDIA_TYPE_VIDEO,
1119  },{
1120  .name = "palette",
1121  .type = AVMEDIA_TYPE_VIDEO,
1122  .config_props = config_input_palette,
1123  },
1124  { NULL }
1125 };
1126 
1127 static const AVFilterPad paletteuse_outputs[] = {
1128  {
1129  .name = "default",
1130  .type = AVMEDIA_TYPE_VIDEO,
1131  .config_props = config_output,
1132  },
1133  { NULL }
1134 };
1135 
1136 AVFilter ff_vf_paletteuse = {
1137  .name = "paletteuse",
1138  .description = NULL_IF_CONFIG_SMALL("Use a palette to downsample an input video stream."),
1139  .priv_size = sizeof(PaletteUseContext),
1140  .query_formats = query_formats,
1141  .init = init,
1142  .uninit = uninit,
1143  .activate = activate,
1144  .inputs = paletteuse_inputs,
1145  .outputs = paletteuse_outputs,
1146  .priv_class = &paletteuse_class,
1147 };
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
cached_color::color
uint32_t color
Definition: vf_paletteuse.c:69
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:117
AV_BPRINT_SIZE_UNLIMITED
#define AV_BPRINT_SIZE_UNLIMITED
config_input_palette
static int config_input_palette(AVFilterLink *inlink)
Definition: vf_paletteuse.c:955
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
diff
static av_always_inline int diff(const uint8_t *c1, const uint8_t *c2, const int trans_thresh)
Definition: vf_paletteuse.c:163
get_dst_color_err
static av_always_inline int get_dst_color_err(PaletteUseContext *s, uint32_t c, int *er, int *eg, int *eb, const enum color_search_method search_method)
Definition: vf_paletteuse.c:369
colormap_nearest_node
static void colormap_nearest_node(const struct color_node *map, const int node_pos, const uint8_t *target, const int trans_thresh, struct nearest_color *nearest)
Definition: vf_paletteuse.c:209
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_paletteuse.c:139
PaletteUseContext::dot_filename
char * dot_filename
Definition: vf_paletteuse.c:102
r
const char * r
Definition: vf_curves.c:114
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:300
av_bprint_finalize
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:283
debug_mean_error
static void debug_mean_error(PaletteUseContext *s, const AVFrame *in1, const AVFrame *in2, int frame_count)
Definition: vf_paletteuse.c:772
out
FILE * out
Definition: movenc.c:54
color
Definition: vf_paletteuse.c:582
av_bprint_init
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1075
PaletteUseContext::last_out
AVFrame * last_out
Definition: vf_paletteuse.c:99
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
end
static av_cold int end(AVCodecContext *avctx)
Definition: avrndec.c:92
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1072
set_frame_func
int(* set_frame_func)(struct PaletteUseContext *s, AVFrame *out, AVFrame *in, int x_start, int y_start, int width, int height)
Definition: vf_paletteuse.c:80
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:300
av_frame_make_writable
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:612
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1103
AVFrame::width
int width
Definition: frame.h:358
w
uint8_t w
Definition: llviddspenc.c:38
av_dynarray2_add
void * av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size, const uint8_t *elem_data)
Add an element of size elem_size to a dynamic array.
Definition: mem.c:324
AVOption
AVOption.
Definition: opt.h:246
b
#define b
Definition: input.c:41
stack_node::dx2
int dx2
Definition: vf_paletteuse.c:249
data
const char data[16]
Definition: mxf.c:91
colormap_nearest_bruteforce
static av_always_inline uint8_t colormap_nearest_bruteforce(const uint32_t *palette, const uint8_t *argb, const int trans_thresh)
Definition: vf_paletteuse.c:179
PaletteUseContext::set_frame
set_frame_func set_frame
Definition: vf_paletteuse.c:94
ff_vf_paletteuse
AVFilter ff_vf_paletteuse
Definition: vf_paletteuse.c:1136
disp_tree
static int disp_tree(const struct color_node *node, const char *fname)
Definition: vf_paletteuse.c:526
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:148
c1
static const uint64_t c1
Definition: murmur3.c:49
FFFrameSync
Frame sync structure.
Definition: framesync.h:146
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
hash
uint8_t hash[HASH_SIZE]
Definition: movenc.c:57
PaletteUseContext::palette_loaded
int palette_loaded
Definition: vf_paletteuse.c:91
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:314
colormap_nearest_recursive
static av_always_inline uint8_t colormap_nearest_recursive(const struct color_node *node, const uint8_t *rgb, const int trans_thresh)
Definition: vf_paletteuse.c:240
stack_node::color_id
int color_id
Definition: vf_paletteuse.c:248
debug_accuracy
static int debug_accuracy(const struct color_node *node, const uint32_t *palette, const int trans_thresh, const enum color_search_method search_method)
Definition: vf_paletteuse.c:551
DIFF_MODE_NONE
@ DIFF_MODE_NONE
Definition: vf_paletteuse.c:53
NB_DITHERING
@ NB_DITHERING
Definition: vf_paletteuse.c:42
dither_value
static int dither_value(int p)
Definition: vf_paletteuse.c:1064
COLOR_SEARCH_BRUTEFORCE
@ COLOR_SEARCH_BRUTEFORCE
Definition: vf_paletteuse.c:48
apply_palette
static int apply_palette(AVFilterLink *inlink, AVFrame *in, AVFrame **outf)
Definition: vf_paletteuse.c:891
PaletteUseContext::cache
struct cache_node cache[CACHE_SIZE]
Definition: vf_paletteuse.c:86
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
colormap_insert
static int colormap_insert(struct color_node *map, uint8_t *color_used, int *nb_used, const uint32_t *palette, const int trans_thresh, const struct color_rect *box)
Definition: vf_paletteuse.c:673
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:190
PaletteUseContext::ordered_dither
int ordered_dither[8 *8]
Definition: vf_paletteuse.c:96
colormap_nearest_iterative
static av_always_inline uint8_t colormap_nearest_iterative(const struct color_node *root, const uint8_t *target, const int trans_thresh)
Definition: vf_paletteuse.c:252
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
set_processing_window
static void set_processing_window(enum diff_mode diff_mode, const AVFrame *prv_src, const AVFrame *cur_src, const AVFrame *prv_dst, AVFrame *cur_dst, int *xp, int *yp, int *wp, int *hp)
Definition: vf_paletteuse.c:802
av_cold
#define av_cold
Definition: attributes.h:90
av_fopen_utf8
FILE * av_fopen_utf8(const char *path, const char *mode)
Open a file using a UTF-8 filename.
Definition: file_open.c:158
INDENT
#define INDENT
Definition: vf_paletteuse.c:496
color_rect
Definition: vf_paletteuse.c:587
DEFINE_SET_FRAME_COLOR_SEARCH
#define DEFINE_SET_FRAME_COLOR_SEARCH(color_search, color_search_macro)
Definition: vf_paletteuse.c:1037
PaletteUseContext::bayer_scale
int bayer_scale
Definition: vf_paletteuse.c:95
width
#define width
s
#define s(width, name)
Definition: cbs_vp9.c:257
dithering_mode
dithering_mode
Definition: vf_paletteuse.c:35
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_paletteuse.c:933
g
const char * g
Definition: vf_curves.c:115
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:484
color_node::right_id
int right_id
Definition: vf_paletteuse.c:62
DITHERING_HECKBERT
@ DITHERING_HECKBERT
Definition: vf_paletteuse.c:38
stack_node
Definition: vf_paletteuse.c:247
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
filters.h
nearest_color::dist_sqd
int dist_sqd
Definition: vf_paletteuse.c:206
ctx
AVFormatContext * ctx
Definition: movenc.c:48
set_frame_lut
static const set_frame_func set_frame_lut[NB_COLOR_SEARCHES][NB_DITHERING]
Definition: vf_paletteuse.c:1058
color_rect::max
uint8_t max[3]
Definition: vf_paletteuse.c:589
f
#define f(width, name)
Definition: cbs_vp9.c:255
if
if(ret)
Definition: filter_design.txt:179
color_node::palette_id
uint8_t palette_id
Definition: vf_paletteuse.c:60
load_apply_palette
static int load_apply_palette(FFFrameSync *fs)
Definition: vf_paletteuse.c:1004
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
NULL
#define NULL
Definition: coverity.c:32
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:659
PaletteUseContext::dither
int dither
Definition: vf_paletteuse.c:92
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:259
AVPALETTE_SIZE
#define AVPALETTE_SIZE
Definition: pixfmt.h:32
COLORMAP_NEAREST
#define COLORMAP_NEAREST(search, palette, root, target, trans_thresh)
Definition: vf_paletteuse.c:324
src
#define src
Definition: vp8dsp.c:254
get_next_color
static int get_next_color(const uint8_t *color_used, const uint32_t *palette, const int trans_thresh, int *component, const struct color_rect *box)
Definition: vf_paletteuse.c:610
DITHERING_ENTRIES
#define DITHERING_ENTRIES(color_search)
Definition: vf_paletteuse.c:1049
inputs
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
Definition: filter_design.txt:243
PaletteUseContext
Definition: vf_paletteuse.c:83
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
cmp_funcs
static const cmp_func cmp_funcs[]
Definition: vf_paletteuse.c:608
AVPALETTE_COUNT
#define AVPALETTE_COUNT
Definition: pixfmt.h:33
disp_node
static void disp_node(AVBPrint *buf, const struct color_node *map, int parent_id, int node_id, int depth)
Definition: vf_paletteuse.c:497
DITHERING_NONE
@ DITHERING_NONE
Definition: vf_paletteuse.c:36
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
paletteuse_options
static const AVOption paletteuse_options[]
Definition: vf_paletteuse.c:111
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:29
PaletteUseContext::trans_thresh
int trans_thresh
Definition: vf_paletteuse.c:90
qsort.h
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:188
ff_framesync_init_dualinput
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
Initialize a frame sync structure for dualinput.
Definition: framesync.c:351
master
const char * master
Definition: vf_curves.c:117
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:444
nearest_color
Definition: vf_paletteuse.c:204
DITHERING_BAYER
@ DITHERING_BAYER
Definition: vf_paletteuse.c:37
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:119
DITHERING_FLOYD_STEINBERG
@ DITHERING_FLOYD_STEINBERG
Definition: vf_paletteuse.c:39
PaletteUseContext::palette
uint32_t palette[AVPALETTE_COUNT]
Definition: vf_paletteuse.c:88
color
static const uint32_t color[16+AV_CLASS_CATEGORY_NB]
Definition: log.c:92
DITHERING_SIERRA2
@ DITHERING_SIERRA2
Definition: vf_paletteuse.c:40
PaletteUseContext::fs
FFFrameSync fs
Definition: vf_paletteuse.c:85
split
static char * split(char *message, char delim)
Definition: af_channelmap.c:81
height
#define height
AV_PIX_FMT_RGB32
#define AV_PIX_FMT_RGB32
Definition: pixfmt.h:370
a
The reader does not expect b to be semantically here and if the code is changed by maybe adding a a division or other the signedness will almost certainly be mistaken To avoid this confusion a new type was SUINT is the C unsigned type but it holds a signed int to use the same example SUINT a
Definition: undefined.txt:41
color_get
static av_always_inline int color_get(PaletteUseContext *s, uint32_t color, uint8_t a, uint8_t r, uint8_t g, uint8_t b, const enum color_search_method search_method)
Check if the requested color is in the cache already.
Definition: vf_paletteuse.c:335
DITHERING_SIERRA2_4A
@ DITHERING_SIERRA2_4A
Definition: vf_paletteuse.c:41
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:187
PaletteUseContext::transparency_index
int transparency_index
Definition: vf_paletteuse.c:89
internal.h
DECLARE_CMP_FUNC
#define DECLARE_CMP_FUNC(name, pos)
Definition: vf_paletteuse.c:594
activate
static int activate(AVFilterContext *ctx)
Definition: vf_paletteuse.c:1097
src1
#define src1
Definition: h264pred.c:139
OFFSET
#define OFFSET(x)
Definition: vf_paletteuse.c:109
in
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(const uint8_t *) pi - 0x80) *(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(const int16_t *) pi >> 8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t, *(const int16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(const int32_t *) pi >> 24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t, *(const int32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(const float *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(const float *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(const float *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(const double *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(const double *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(const double *) pi *(1U<< 31)))) #define SET_CONV_FUNC_GROUP(ofmt, ifmt) static void set_generic_function(AudioConvert *ac) { } void ff_audio_convert_free(AudioConvert **ac) { if(! *ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);} AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enum AVSampleFormat out_fmt, enum AVSampleFormat in_fmt, int channels, int sample_rate, int apply_map) { AudioConvert *ac;int in_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) return NULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method !=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt) > 2) { ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc) { av_free(ac);return NULL;} return ac;} in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar) { ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar ? ac->channels :1;} else if(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;else ac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);return ac;} int ff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in) { int use_generic=1;int len=in->nb_samples;int p;if(ac->dc) { av_log(ac->avr, AV_LOG_TRACE, "%d samples - audio_convert: %s to %s (dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));return ff_convert_dither(ac-> in
Definition: audio_convert.c:326
bprint.h
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
cache_node
Definition: vf_paletteuse.c:73
AV_QSORT
#define AV_QSORT(p, num, type, cmp)
Quicksort This sort is fast, and fully inplace but not stable and it is possible to construct input t...
Definition: qsort.h:33
internal.h
cmp_pal_entry
static int cmp_pal_entry(const void *a, const void *b)
Definition: vf_paletteuse.c:719
delta
float delta
Definition: vorbis_enc_data.h:457
av_always_inline
#define av_always_inline
Definition: attributes.h:49
cache_node::entries
struct cached_color * entries
Definition: vf_paletteuse.c:74
uint8_t
uint8_t
Definition: audio_convert.c:194
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:554
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:60
PaletteUseContext::diff_mode
int diff_mode
Definition: vf_paletteuse.c:97
color_node::split
int split
Definition: vf_paletteuse.c:61
cached_color::pal_entry
uint8_t pal_entry
Definition: vf_paletteuse.c:70
load_colormap
static void load_colormap(PaletteUseContext *s)
Definition: vf_paletteuse.c:726
PaletteUseContext::total_mean_err
uint64_t total_mean_err
Definition: vf_paletteuse.c:105
diff_mode
diff_mode
Definition: vf_paletteuse.c:52
FLAGS
#define FLAGS
Definition: vf_paletteuse.c:110
AVFilter
Filter definition.
Definition: avfilter.h:144
cache_node::nb_entries
int nb_entries
Definition: vf_paletteuse.c:75
AV_PIX_FMT_PAL8
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
Definition: pixfmt.h:77
ret
ret
Definition: filter_design.txt:187
PaletteUseContext::color_search_method
int color_search_method
Definition: vf_paletteuse.c:103
pos
unsigned int pos
Definition: spdifenc.c:412
av_bprintf
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
set_frame
static av_always_inline int set_frame(PaletteUseContext *s, AVFrame *out, AVFrame *in, int x_start, int y_start, int w, int h, enum dithering_mode dither, const enum color_search_method search_method)
Definition: vf_paletteuse.c:388
left
Definition: snow.txt:386
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen_template.c:38
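FF_ARRAY_ELEMS(a) evaluates to the number of elements of a true array (conventionally sizeof(a) / sizeof((a)[0])), so loop bounds never drift out of sync with the table they walk; it does not work on pointers. A small sketch:

    static int sum_weights(void)
    {
        static const int weights[] = { 1, 2, 4, 2, 1 };
        int i, sum = 0;
        for (i = 0; i < FF_ARRAY_ELEMS(weights); i++)
            sum += weights[i];
        return sum;  /* element count is derived at compile time */
    }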
NBITS
#define NBITS
Definition: vf_paletteuse.c:65
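NBITS controls how many bits of each colour channel feed the lookup cache, so the cache ends up with one bucket per reduced-precision (R, G, B) combination. Below is a hypothetical sketch of how such a bucket index can be packed; which bits of each channel the real filter keeps is an assumption here, not taken from the source:

    /* Hypothetical, not the filter's actual hash: keep the top NBITS bits of
     * each channel and pack them into an index in [0, CACHE_SIZE). */
    static unsigned cache_index_sketch(uint8_t r, uint8_t g, uint8_t b)
    {
        const unsigned mask = (1 << NBITS) - 1;
        return ((r >> (8 - NBITS)) & mask) << (2 * NBITS)
             | ((g >> (8 - NBITS)) & mask) <<  NBITS
             | ((b >> (8 - NBITS)) & mask);
    }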
AVFrame::height
int height
Definition: frame.h:358
c2
static const uint64_t c2
Definition: murmur3.c:50
framesync.h
DIFF_MODE_RECTANGLE
@ DIFF_MODE_RECTANGLE
Definition: vf_paletteuse.c:54
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
COLOR_SEARCH_NNS_ITERATIVE
@ COLOR_SEARCH_NNS_ITERATIVE
Definition: vf_paletteuse.c:46
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:223
avfilter.h
cmp_func
int(* cmp_func)(const void *, const void *)
Definition: vf_paletteuse.c:592
PaletteUseContext::map
struct color_node map[AVPALETTE_COUNT]
Definition: vf_paletteuse.c:87
COLOR_SEARCH_NNS_RECURSIVE
@ COLOR_SEARCH_NNS_RECURSIVE
Definition: vf_paletteuse.c:47
PaletteUseContext::debug_accuracy
int debug_accuracy
Definition: vf_paletteuse.c:106
AVFilterContext
An instance of a filter.
Definition: avfilter.h:338
shift
static int shift(int a, int b)
Definition: sonic.c:82
color_node::val
uint8_t val[4]
Definition: vf_paletteuse.c:59
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
CACHE_SIZE
#define CACHE_SIZE
Definition: vf_paletteuse.c:66
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:85
color::pal_id
uint8_t pal_id
Definition: vf_paletteuse.c:584
NB_COLOR_SEARCHES
@ NB_COLOR_SEARCHES
Definition: vf_paletteuse.c:49
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:240
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
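av_freep() takes the address of a pointer, frees what it points to and resets the pointer to NULL, which avoids double frees and dangling pointers. A short sketch (the buffer size is arbitrary):

    #include "libavutil/error.h"
    #include "libavutil/mem.h"

    static int freep_example(void)
    {
        uint8_t *buf = av_malloc(1024);
        if (!buf)
            return AVERROR(ENOMEM);
        /* ... use buf ... */
        av_freep(&buf);  /* frees the buffer and sets buf to NULL */
        av_freep(&buf);  /* harmless: freeing NULL is a no-op */
        return 0;
    }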
color::value
uint32_t value
Definition: vf_paletteuse.c:583
paletteuse_outputs
static const AVFilterPad paletteuse_outputs[]
Definition: vf_paletteuse.c:1127
PaletteUseContext::calc_mean_err
int calc_mean_err
Definition: vf_paletteuse.c:104
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:50
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:331
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
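av_log() expects a pointer to a struct whose first member is an AVClass * (an AVFilterContext works, and NULL is also accepted), a log level and a printf-style format. The message below is illustrative only, not a string from the filter:

    static void report_cache_size(AVFilterContext *ctx, int nb_entries)
    {
        av_log(ctx, AV_LOG_VERBOSE, "color cache holds %d entries\n", nb_entries);
    }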
color_rect::min
uint8_t min[3]
Definition: vf_paletteuse.c:588
h
h
Definition: vp9dsp_template.c:2038
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:334
color_node::left_id
int left_id
Definition: vf_paletteuse.c:62
color_node
Definition: vf_paletteuse.c:58
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(paletteuse)
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:227
ff_framesync_dualinput_get_writable
int ff_framesync_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
Same as ff_framesync_dualinput_get(), but make sure that f0 is writable.
Definition: framesync.c:389
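ff_framesync_activate() and ff_framesync_dualinput_get_writable() fit together in the usual dual-input pattern: the filter's activate callback forwards to the framesync core, and the framesync event handler pulls a writable main frame plus the secondary (palette) frame. The sketch below assumes the context keeps its FFFrameSync in a field named fs and that process_frame is registered as the on_event callback; it is a schematic, not the filter's actual code:

    static int process_frame(FFFrameSync *fs)
    {
        AVFilterContext *ctx = fs->parent;
        AVFrame *in, *pal;
        int ret = ff_framesync_dualinput_get_writable(fs, &in, &pal);
        if (ret < 0)
            return ret;
        /* ... remap "in" through the palette carried by "pal" ... */
        return ff_filter_frame(ctx->outputs[0], in);
    }

    static int activate(AVFilterContext *ctx)
    {
        PaletteUseContext *s = ctx->priv;
        return ff_framesync_activate(&s->fs);
    }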
dither_color
static av_always_inline int dither_color(uint32_t px, int er, int eg, int eb, int scale, int shift)
Definition: vf_paletteuse.c:155
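Given its signature, dither_color() is the error-diffusion step: it takes a packed pixel plus per-channel error terms and a weight expressed as scale / 2^shift. The sketch below shows the general shape of such a helper under the assumption of 0xRRGGBB packing; the real function's exact packing, rounding and clipping may differ:

    /* Sketch only: add a weighted share (scale / 2^shift) of a neighbour's
     * quantization error to each channel and clamp to 8 bits. */
    static int dither_color_sketch(uint32_t px, int er, int eg, int eb,
                                   int scale, int shift)
    {
        return av_clip_uint8((px >> 16 & 0xff) + ((er * scale) >> shift)) << 16
             | av_clip_uint8((px >>  8 & 0xff) + ((eg * scale) >> shift)) <<  8
             | av_clip_uint8((px       & 0xff) + ((eb * scale) >> shift));
    }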
int
int
Definition: ffmpeg_filter.c:192
PaletteUseContext::last_in
AVFrame * last_in
Definition: vf_paletteuse.c:98
nearest_color::node_pos
int node_pos
Definition: vf_paletteuse.c:205
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Definition: opt.h:232
paletteuse_inputs
static const AVFilterPad paletteuse_inputs[]
Definition: vf_paletteuse.c:1115
load_palette
static void load_palette(PaletteUseContext *s, const AVFrame *palette_frame)
Definition: vf_paletteuse.c:970
cached_color
Definition: vf_paletteuse.c:68
color_search_method
color_search_method
Definition: vf_paletteuse.c:45
min
float min
Definition: vorbis_enc_data.h:456
NB_DIFF_MODE
@ NB_DIFF_MODE
Definition: vf_paletteuse.c:55
dither
static const uint8_t dither[8][8]
Definition: vf_fspp.c:57
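An 8x8 table like this is an ordered (Bayer) threshold matrix: each pixel is biased by a value that depends only on its position modulo 8 before quantization, trading banding for a fixed repeating pattern. The sketch below only illustrates the idea; the 0..63 value range, the centring around 32 and the scale parameter are assumptions, not code from vf_fspp.c or this filter:

    /* Illustrative only: bias one 8-bit channel with a position-dependent
     * threshold taken from an 8x8 matrix assumed to span 0..63. */
    static uint8_t ordered_dither_sketch(uint8_t v, int x, int y,
                                         const uint8_t matrix[8][8], int scale)
    {
        const int d = (matrix[y & 7][x & 7] - 32) >> scale;
        return av_clip_uint8(v + d);
    }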