FFmpeg source listing: libavfilter/vf_elbg.c
/*
 * Copyright (c) 2013 Stefano Sabatini
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * video quantizer filter based on ELBG
 */

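/*
 * How the filter works: every pixel of the input frame is packed into a
 * 4-component codeword (B, G, R, A), the codewords are clustered into a
 * codebook of codebook_length entries using the Enhanced LBG algorithm
 * implemented in libavcodec/elbg.c, and each pixel is then replaced by its
 * closest codebook entry (or, when pal8 output is enabled, by that entry's
 * palette index), which yields the posterize-like effect.
 */
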
#include "libavcodec/elbg.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/random_seed.h"

#include "avfilter.h"
#include "drawutils.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct ELBGFilterContext {
    const AVClass *class;
    struct ELBGContext *ctx;
    AVLFG lfg;
    int64_t lfg_seed;
    int max_steps_nb;
    int *codeword;
    int codeword_length;
    int *codeword_closest_codebook_idxs;
    int *codebook;
    int codebook_length;
    const AVPixFmtDescriptor *pix_desc;
    uint8_t rgba_map[4];
    int use_alpha;
    int pal8;
} ELBGFilterContext;

#define OFFSET(x) offsetof(ELBGFilterContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

static const AVOption elbg_options[] = {
    { "codebook_length", "set codebook length", OFFSET(codebook_length), AV_OPT_TYPE_INT, { .i64 = 256 }, 1, INT_MAX, FLAGS },
    { "l",               "set codebook length", OFFSET(codebook_length), AV_OPT_TYPE_INT, { .i64 = 256 }, 1, INT_MAX, FLAGS },
    { "nb_steps", "set max number of steps used to compute the mapping", OFFSET(max_steps_nb), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, INT_MAX, FLAGS },
    { "n",        "set max number of steps used to compute the mapping", OFFSET(max_steps_nb), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, INT_MAX, FLAGS },
    { "seed", "set the random seed", OFFSET(lfg_seed), AV_OPT_TYPE_INT64, { .i64 = -1 }, -1, UINT32_MAX, FLAGS },
    { "s",    "set the random seed", OFFSET(lfg_seed), AV_OPT_TYPE_INT64, { .i64 = -1 }, -1, UINT32_MAX, FLAGS },
    { "pal8", "set the pal8 output", OFFSET(pal8), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { "use_alpha", "use alpha channel for mapping", OFFSET(use_alpha), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { NULL }
};
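
/*
 * Illustrative usage (not taken from this file): with the options above, an
 * ffmpeg command line such as
 *
 *     ffmpeg -i input.mp4 -vf "elbg=codebook_length=16:nb_steps=2" output.mp4
 *
 * would quantize each frame against a 16-entry codebook; input.mp4 and
 * output.mp4 are placeholder file names.
 */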

AVFILTER_DEFINE_CLASS(elbg);

static av_cold int init(AVFilterContext *ctx)
{
    ELBGFilterContext *const elbg = ctx->priv;

    if (elbg->pal8 && elbg->codebook_length > 256) {
        av_log(ctx, AV_LOG_ERROR, "pal8 output allows max 256 codebook length.\n");
        return AVERROR(EINVAL);
    }

    if (elbg->lfg_seed == -1)
        elbg->lfg_seed = av_get_random_seed();

    av_lfg_init(&elbg->lfg, elbg->lfg_seed);
    return 0;
}

static int query_formats(AVFilterContext *ctx)
{
    ELBGFilterContext *const elbg = ctx->priv;
    int ret;

    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_ARGB, AV_PIX_FMT_RGBA, AV_PIX_FMT_ABGR, AV_PIX_FMT_BGRA,
        AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
        AV_PIX_FMT_NONE
    };
    if (!elbg->pal8) {
        return ff_set_common_formats_from_list(ctx, pix_fmts);
    } else {
        static const enum AVPixelFormat pal8_fmt[] = {
            AV_PIX_FMT_PAL8,
            AV_PIX_FMT_NONE
        };
        if ((ret = ff_formats_ref(ff_make_format_list(pix_fmts), &ctx->inputs[0]->outcfg.formats)) < 0 ||
            (ret = ff_formats_ref(ff_make_format_list(pal8_fmt), &ctx->outputs[0]->incfg.formats)) < 0)
            return ret;
    }
    return 0;
}

#define NB_COMPONENTS 4

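/* Allocate (or reallocate) the per-frame buffers: one NB_COMPONENTS-sized
 * codeword per pixel, one closest-codebook-index per pixel, and the codebook
 * itself, then record how the R/G/B/A components are laid out for the
 * negotiated input pixel format. */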
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    ELBGFilterContext *const elbg = ctx->priv;

    elbg->pix_desc = av_pix_fmt_desc_get(inlink->format);
    elbg->codeword_length = inlink->w * inlink->h;
    elbg->codeword = av_realloc_f(elbg->codeword, elbg->codeword_length,
                                  NB_COMPONENTS * sizeof(*elbg->codeword));
    if (!elbg->codeword)
        return AVERROR(ENOMEM);

    elbg->codeword_closest_codebook_idxs =
        av_realloc_f(elbg->codeword_closest_codebook_idxs, elbg->codeword_length,
                     sizeof(*elbg->codeword_closest_codebook_idxs));
    if (!elbg->codeword_closest_codebook_idxs)
        return AVERROR(ENOMEM);

    elbg->codebook = av_realloc_f(elbg->codebook, elbg->codebook_length,
                                  NB_COMPONENTS * sizeof(*elbg->codebook));
    if (!elbg->codebook)
        return AVERROR(ENOMEM);

    ff_fill_rgba_map(elbg->rgba_map, inlink->format);

    return 0;
}

#define R 0
#define G 1
#define B 2
#define A 3

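/* Per-frame processing: pack every pixel into a B,G,R,A codeword using the
 * rgba_map offsets resolved in config_input(), run ELBG to compute the
 * codebook and the closest-entry index for each pixel, then either emit a
 * PAL8 frame (palette plus indices) or overwrite the input pixels in place
 * with their codebook values. */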
static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
    ELBGFilterContext *const elbg = inlink->dst->priv;
    int i, j, k, ret;
    uint8_t *p, *p0;

    const uint8_t r_idx = elbg->rgba_map[R];
    const uint8_t g_idx = elbg->rgba_map[G];
    const uint8_t b_idx = elbg->rgba_map[B];
    const uint8_t a_idx = elbg->rgba_map[A];

    /* build the codeword */
    p0 = frame->data[0];
    k = 0;
    for (i = 0; i < inlink->h; i++) {
        p = p0;
        for (j = 0; j < inlink->w; j++) {
            elbg->codeword[k++] = p[b_idx];
            elbg->codeword[k++] = p[g_idx];
            elbg->codeword[k++] = p[r_idx];
            elbg->codeword[k++] = elbg->use_alpha ? p[a_idx] : 0xff;
            p += elbg->pix_desc->nb_components;
        }
        p0 += frame->linesize[0];
    }

    /* compute the codebook */
    ret = avpriv_elbg_do(&elbg->ctx, elbg->codeword, NB_COMPONENTS,
                         elbg->codeword_length, elbg->codebook,
                         elbg->codebook_length, elbg->max_steps_nb,
                         elbg->codeword_closest_codebook_idxs, &elbg->lfg, 0);
    if (ret < 0) {
        av_frame_free(&frame);
        return ret;
    }

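    /* pal8 output: write the codebook into the palette of a new PAL8 frame
     * and store, for each pixel, the index of its closest codebook entry */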
    if (elbg->pal8) {
        AVFilterLink *outlink = inlink->dst->outputs[0];
        AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
        uint32_t *pal;

        if (!out) {
            av_frame_free(&frame);
            return AVERROR(ENOMEM);
        }
        av_frame_copy_props(out, frame);
        av_frame_free(&frame);
        pal = (uint32_t *)out->data[1];
        p0  = (uint8_t *)out->data[0];

        for (i = 0; i < elbg->codebook_length; i++) {
            const int al = elbg->use_alpha ? elbg->codebook[i*4+3] : 0xff;
            pal[i] = (al                    << 24) |
                     (elbg->codebook[i*4+2] << 16) |
                     (elbg->codebook[i*4+1] <<  8) |
                      elbg->codebook[i*4];
        }

        k = 0;
        for (i = 0; i < inlink->h; i++) {
            p = p0;
            for (j = 0; j < inlink->w; j++, p++) {
                p[0] = elbg->codeword_closest_codebook_idxs[k++];
            }
            p0 += out->linesize[0];
        }

        return ff_filter_frame(outlink, out);
    }

    /* fill the output with the codebook values */
    p0 = frame->data[0];

    k = 0;
    for (i = 0; i < inlink->h; i++) {
        p = p0;
        for (j = 0; j < inlink->w; j++) {
            int cb_idx = NB_COMPONENTS * elbg->codeword_closest_codebook_idxs[k++];
            p[b_idx] = elbg->codebook[cb_idx];
            p[g_idx] = elbg->codebook[cb_idx+1];
            p[r_idx] = elbg->codebook[cb_idx+2];
            p[a_idx] = elbg->use_alpha ? elbg->codebook[cb_idx+3] : 0xFFu;
            p += elbg->pix_desc->nb_components;
        }
        p0 += frame->linesize[0];
    }

    return ff_filter_frame(inlink->dst->outputs[0], frame);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    ELBGFilterContext *const elbg = ctx->priv;

    avpriv_elbg_free(&elbg->ctx);

    av_freep(&elbg->codebook);
    av_freep(&elbg->codeword);
    av_freep(&elbg->codeword_closest_codebook_idxs);
}

static const AVFilterPad elbg_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .flags        = AVFILTERPAD_FLAG_NEEDS_WRITABLE,
        .config_props = config_input,
        .filter_frame = filter_frame,
    },
};

const AVFilter ff_vf_elbg = {
    .name          = "elbg",
    .description   = NULL_IF_CONFIG_SMALL("Apply posterize effect, using the ELBG algorithm."),
    .priv_size     = sizeof(ELBGFilterContext),
    .priv_class    = &elbg_class,
    .init          = init,
    .uninit        = uninit,
    FILTER_INPUTS(elbg_inputs),
    FILTER_OUTPUTS(ff_video_default_filterpad),
    FILTER_QUERY_FUNC(query_formats),
};
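
/* Note (assumption, not part of this file): like other libavfilter filters,
 * ff_vf_elbg is presumably declared in libavfilter/allfilters.c so that it is
 * registered at build time; that file is not included in this listing. */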