FFmpeg
vf_stack.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2015 Paul B. Mahol
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include "libavutil/avstring.h"
22 #include "libavutil/imgutils.h"
23 #include "libavutil/opt.h"
24 #include "libavutil/parseutils.h"
25 #include "libavutil/pixdesc.h"
26 
27 #include "avfilter.h"
28 #include "drawutils.h"
29 #include "formats.h"
30 #include "internal.h"
31 #include "framesync.h"
32 #include "video.h"
33 
/**
 * Per-input placement of one stacked video inside the output frame.
 * All arrays are indexed by plane (at most 4 planes).
 */
typedef struct StackItem {
    int x[4], y[4];     ///< top-left corner per plane: x is a byte offset into the row, y a line offset
    int linesize[4];    ///< bytes to copy per row, per plane (from av_image_fill_linesizes())
    int height[4];      ///< rows to copy, per plane (chroma planes are shifted by log2_chroma_h)
} StackItem;
39 
40 typedef struct StackContext {
41  const AVClass *class;
43  int nb_inputs;
44  char *layout;
45  int shortest;
48  int nb_planes;
49  uint8_t fillcolor[4];
52 
55 
59 } StackContext;
60 
62 {
63  StackContext *s = ctx->priv;
64  int reject_flags = AV_PIX_FMT_FLAG_BITSTREAM |
67 
68  if (s->fillcolor_enable) {
70  }
71 
72  return ff_set_common_formats(ctx, ff_formats_pixdesc_filter(0, reject_flags));
73 }
74 
76 {
77  StackContext *s = ctx->priv;
78  int i, ret;
79 
80  if (!strcmp(ctx->filter->name, "vstack"))
81  s->is_vertical = 1;
82 
83  if (!strcmp(ctx->filter->name, "hstack"))
84  s->is_horizontal = 1;
85 
86  s->frames = av_calloc(s->nb_inputs, sizeof(*s->frames));
87  if (!s->frames)
88  return AVERROR(ENOMEM);
89 
90  s->items = av_calloc(s->nb_inputs, sizeof(*s->items));
91  if (!s->items)
92  return AVERROR(ENOMEM);
93 
94  if (!strcmp(ctx->filter->name, "xstack")) {
95  if (strcmp(s->fillcolor_str, "none") &&
96  av_parse_color(s->fillcolor, s->fillcolor_str, -1, ctx) >= 0) {
97  s->fillcolor_enable = 1;
98  } else {
99  s->fillcolor_enable = 0;
100  }
101  if (!s->layout) {
102  if (s->nb_inputs == 2) {
103  s->layout = av_strdup("0_0|w0_0");
104  if (!s->layout)
105  return AVERROR(ENOMEM);
106  } else {
107  av_log(ctx, AV_LOG_ERROR, "No layout specified.\n");
108  return AVERROR(EINVAL);
109  }
110  }
111  }
112 
113  for (i = 0; i < s->nb_inputs; i++) {
114  AVFilterPad pad = { 0 };
115 
116  pad.type = AVMEDIA_TYPE_VIDEO;
117  pad.name = av_asprintf("input%d", i);
118  if (!pad.name)
119  return AVERROR(ENOMEM);
120 
121  if ((ret = ff_append_inpad_free_name(ctx, &pad)) < 0)
122  return ret;
123  }
124 
125  return 0;
126 }
127 
128 static int process_slice(AVFilterContext *ctx, void *arg, int job, int nb_jobs)
129 {
130  StackContext *s = ctx->priv;
131  AVFrame *out = arg;
132  AVFrame **in = s->frames;
133  const int start = (s->nb_inputs * job ) / nb_jobs;
134  const int end = (s->nb_inputs * (job+1)) / nb_jobs;
135 
136  for (int i = start; i < end; i++) {
137  StackItem *item = &s->items[i];
138 
139  for (int p = 0; p < s->nb_planes; p++) {
140  av_image_copy_plane(out->data[p] + out->linesize[p] * item->y[p] + item->x[p],
141  out->linesize[p],
142  in[i]->data[p],
143  in[i]->linesize[p],
144  item->linesize[p], item->height[p]);
145  }
146  }
147 
148  return 0;
149 }
150 
152 {
153  AVFilterContext *ctx = fs->parent;
154  AVFilterLink *outlink = ctx->outputs[0];
155  StackContext *s = fs->opaque;
156  AVFrame **in = s->frames;
157  AVFrame *out;
158  int i, ret;
159 
160  for (i = 0; i < s->nb_inputs; i++) {
161  if ((ret = ff_framesync_get_frame(&s->fs, i, &in[i], 0)) < 0)
162  return ret;
163  }
164 
165  out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
166  if (!out)
167  return AVERROR(ENOMEM);
168  out->pts = av_rescale_q(s->fs.pts, s->fs.time_base, outlink->time_base);
169  out->sample_aspect_ratio = outlink->sample_aspect_ratio;
170 
171  if (s->fillcolor_enable)
172  ff_fill_rectangle(&s->draw, &s->color, out->data, out->linesize,
173  0, 0, outlink->w, outlink->h);
174 
176  FFMIN(s->nb_inputs, ff_filter_get_nb_threads(ctx)));
177 
178  return ff_filter_frame(outlink, out);
179 }
180 
181 static int config_output(AVFilterLink *outlink)
182 {
183  AVFilterContext *ctx = outlink->src;
184  StackContext *s = ctx->priv;
185  AVRational frame_rate = ctx->inputs[0]->frame_rate;
186  AVRational sar = ctx->inputs[0]->sample_aspect_ratio;
187  int height = ctx->inputs[0]->h;
188  int width = ctx->inputs[0]->w;
189  FFFrameSyncIn *in;
190  int i, ret;
191 
192  s->desc = av_pix_fmt_desc_get(outlink->format);
193  if (!s->desc)
194  return AVERROR_BUG;
195 
196  if (s->is_vertical) {
197  for (i = 0; i < s->nb_inputs; i++) {
198  AVFilterLink *inlink = ctx->inputs[i];
199  StackItem *item = &s->items[i];
200 
201  if (ctx->inputs[i]->w != width) {
202  av_log(ctx, AV_LOG_ERROR, "Input %d width %d does not match input %d width %d.\n", i, ctx->inputs[i]->w, 0, width);
203  return AVERROR(EINVAL);
204  }
205 
206  if ((ret = av_image_fill_linesizes(item->linesize, inlink->format, inlink->w)) < 0) {
207  return ret;
208  }
209 
210  item->height[1] = item->height[2] = AV_CEIL_RSHIFT(inlink->h, s->desc->log2_chroma_h);
211  item->height[0] = item->height[3] = inlink->h;
212 
213  if (i) {
214  item->y[1] = item->y[2] = AV_CEIL_RSHIFT(height, s->desc->log2_chroma_h);
215  item->y[0] = item->y[3] = height;
216 
217  height += ctx->inputs[i]->h;
218  }
219  }
220  } else if (s->is_horizontal) {
221  for (i = 0; i < s->nb_inputs; i++) {
222  AVFilterLink *inlink = ctx->inputs[i];
223  StackItem *item = &s->items[i];
224 
225  if (ctx->inputs[i]->h != height) {
226  av_log(ctx, AV_LOG_ERROR, "Input %d height %d does not match input %d height %d.\n", i, ctx->inputs[i]->h, 0, height);
227  return AVERROR(EINVAL);
228  }
229 
230  if ((ret = av_image_fill_linesizes(item->linesize, inlink->format, inlink->w)) < 0) {
231  return ret;
232  }
233 
234  item->height[1] = item->height[2] = AV_CEIL_RSHIFT(inlink->h, s->desc->log2_chroma_h);
235  item->height[0] = item->height[3] = inlink->h;
236 
237  if (i) {
238  if ((ret = av_image_fill_linesizes(item->x, inlink->format, width)) < 0) {
239  return ret;
240  }
241 
242  width += ctx->inputs[i]->w;
243  }
244  }
245  } else {
246  char *arg, *p = s->layout, *saveptr = NULL;
247  char *arg2, *p2, *saveptr2 = NULL;
248  char *arg3, *p3, *saveptr3 = NULL;
249  int inw, inh, size;
250 
251  if (s->fillcolor_enable) {
252  ff_draw_init(&s->draw, ctx->inputs[0]->format, 0);
253  ff_draw_color(&s->draw, &s->color, s->fillcolor);
254  }
255 
256  for (i = 0; i < s->nb_inputs; i++) {
257  AVFilterLink *inlink = ctx->inputs[i];
258  StackItem *item = &s->items[i];
259 
260  if (!(arg = av_strtok(p, "|", &saveptr)))
261  return AVERROR(EINVAL);
262 
263  p = NULL;
264 
265  if ((ret = av_image_fill_linesizes(item->linesize, inlink->format, inlink->w)) < 0) {
266  return ret;
267  }
268 
269  item->height[1] = item->height[2] = AV_CEIL_RSHIFT(inlink->h, s->desc->log2_chroma_h);
270  item->height[0] = item->height[3] = inlink->h;
271 
272  p2 = arg;
273  inw = inh = 0;
274 
275  for (int j = 0; j < 2; j++) {
276  if (!(arg2 = av_strtok(p2, "_", &saveptr2)))
277  return AVERROR(EINVAL);
278 
279  p2 = NULL;
280  p3 = arg2;
281  while ((arg3 = av_strtok(p3, "+", &saveptr3))) {
282  p3 = NULL;
283  if (sscanf(arg3, "w%d", &size) == 1) {
284  if (size == i || size < 0 || size >= s->nb_inputs)
285  return AVERROR(EINVAL);
286 
287  if (!j)
288  inw += ctx->inputs[size]->w;
289  else
290  inh += ctx->inputs[size]->w;
291  } else if (sscanf(arg3, "h%d", &size) == 1) {
292  if (size == i || size < 0 || size >= s->nb_inputs)
293  return AVERROR(EINVAL);
294 
295  if (!j)
296  inw += ctx->inputs[size]->h;
297  else
298  inh += ctx->inputs[size]->h;
299  } else if (sscanf(arg3, "%d", &size) == 1) {
300  if (size < 0)
301  return AVERROR(EINVAL);
302 
303  if (!j)
304  inw += size;
305  else
306  inh += size;
307  } else {
308  return AVERROR(EINVAL);
309  }
310  }
311  }
312 
313  if ((ret = av_image_fill_linesizes(item->x, inlink->format, inw)) < 0) {
314  return ret;
315  }
316 
317  item->y[1] = item->y[2] = AV_CEIL_RSHIFT(inh, s->desc->log2_chroma_h);
318  item->y[0] = item->y[3] = inh;
319 
320  width = FFMAX(width, inlink->w + inw);
321  height = FFMAX(height, inlink->h + inh);
322  }
323  }
324 
325  s->nb_planes = av_pix_fmt_count_planes(outlink->format);
326 
327  outlink->w = width;
328  outlink->h = height;
329  outlink->frame_rate = frame_rate;
330  outlink->sample_aspect_ratio = sar;
331 
332  for (i = 1; i < s->nb_inputs; i++) {
333  AVFilterLink *inlink = ctx->inputs[i];
334  if (outlink->frame_rate.num != inlink->frame_rate.num ||
335  outlink->frame_rate.den != inlink->frame_rate.den) {
337  "Video inputs have different frame rates, output will be VFR\n");
338  outlink->frame_rate = av_make_q(1, 0);
339  break;
340  }
341  }
342 
343  if ((ret = ff_framesync_init(&s->fs, ctx, s->nb_inputs)) < 0)
344  return ret;
345 
346  in = s->fs.in;
347  s->fs.opaque = s;
348  s->fs.on_event = process_frame;
349 
350  for (i = 0; i < s->nb_inputs; i++) {
351  AVFilterLink *inlink = ctx->inputs[i];
352 
353  in[i].time_base = inlink->time_base;
354  in[i].sync = 1;
355  in[i].before = EXT_STOP;
356  in[i].after = s->shortest ? EXT_STOP : EXT_INFINITY;
357  }
358 
359  ret = ff_framesync_configure(&s->fs);
360  outlink->time_base = s->fs.time_base;
361 
362  return ret;
363 }
364 
366 {
367  StackContext *s = ctx->priv;
368 
369  ff_framesync_uninit(&s->fs);
370  av_freep(&s->frames);
371  av_freep(&s->items);
372 }
373 
375 {
376  StackContext *s = ctx->priv;
377  return ff_framesync_activate(&s->fs);
378 }
379 
380 #define OFFSET(x) offsetof(StackContext, x)
381 #define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM
382 static const AVOption stack_options[] = {
383  { "inputs", "set number of inputs", OFFSET(nb_inputs), AV_OPT_TYPE_INT, {.i64=2}, 2, INT_MAX, .flags = FLAGS },
384  { "shortest", "force termination when the shortest input terminates", OFFSET(shortest), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags = FLAGS },
385  { NULL },
386 };
387 
388 AVFILTER_DEFINE_CLASS_EXT(stack, "(h|v)stack", stack_options);
389 
390 static const AVFilterPad outputs[] = {
391  {
392  .name = "default",
393  .type = AVMEDIA_TYPE_VIDEO,
394  .config_props = config_output,
395  },
396 };
397 
#if CONFIG_HSTACK_FILTER

/*
 * hstack: stack inputs side by side; all inputs must have the same height.
 * NOTE(review): the extraction dropped three member lines; FILTER_OUTPUTS,
 * FILTER_QUERY_FUNC, AVFILTER_FLAG_DYNAMIC_INPUTS and
 * AVFILTER_FLAG_SLICE_THREADS are all referenced by this file per the
 * symbol index, so they are restored here.
 */
const AVFilter ff_vf_hstack = {
    .name          = "hstack",
    .description   = NULL_IF_CONFIG_SMALL("Stack video inputs horizontally."),
    .priv_class    = &stack_class,
    .priv_size     = sizeof(StackContext),
    FILTER_OUTPUTS(outputs),
    FILTER_QUERY_FUNC(query_formats),
    .init          = init,
    .uninit        = uninit,
    .activate      = activate,
    .flags         = AVFILTER_FLAG_DYNAMIC_INPUTS | AVFILTER_FLAG_SLICE_THREADS,
};

#endif /* CONFIG_HSTACK_FILTER */
414 
#if CONFIG_VSTACK_FILTER

/*
 * vstack: stack inputs top to bottom; all inputs must have the same width.
 * NOTE(review): dropped member lines restored (see symbol index:
 * FILTER_OUTPUTS, FILTER_QUERY_FUNC, AVFILTER_FLAG_DYNAMIC_INPUTS,
 * AVFILTER_FLAG_SLICE_THREADS).
 */
const AVFilter ff_vf_vstack = {
    .name          = "vstack",
    .description   = NULL_IF_CONFIG_SMALL("Stack video inputs vertically."),
    .priv_class    = &stack_class,
    .priv_size     = sizeof(StackContext),
    FILTER_OUTPUTS(outputs),
    FILTER_QUERY_FUNC(query_formats),
    .init          = init,
    .uninit        = uninit,
    .activate      = activate,
    .flags         = AVFILTER_FLAG_DYNAMIC_INPUTS | AVFILTER_FLAG_SLICE_THREADS,
};

#endif /* CONFIG_VSTACK_FILTER */
431 
432 #if CONFIG_XSTACK_FILTER
433 
434 static const AVOption xstack_options[] = {
435  { "inputs", "set number of inputs", OFFSET(nb_inputs), AV_OPT_TYPE_INT, {.i64=2}, 2, INT_MAX, .flags = FLAGS },
436  { "layout", "set custom layout", OFFSET(layout), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, .flags = FLAGS },
437  { "shortest", "force termination when the shortest input terminates", OFFSET(shortest), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags = FLAGS },
438  { "fill", "set the color for unused pixels", OFFSET(fillcolor_str), AV_OPT_TYPE_STRING, {.str = "none"}, .flags = FLAGS },
439  { NULL },
440 };
441 
442 AVFILTER_DEFINE_CLASS(xstack);
443 
444 const AVFilter ff_vf_xstack = {
445  .name = "xstack",
446  .description = NULL_IF_CONFIG_SMALL("Stack video inputs into custom layout."),
447  .priv_size = sizeof(StackContext),
448  .priv_class = &xstack_class,
451  .init = init,
452  .uninit = uninit,
453  .activate = activate,
455 };
456 
457 #endif /* CONFIG_XSTACK_FILTER */
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:98
StackItem::x
int x[4]
Definition: vf_stack.c:35
FFFrameSyncIn::time_base
AVRational time_base
Time base for the incoming frames.
Definition: framesync.h:96
ff_framesync_configure
int ff_framesync_configure(FFFrameSync *fs)
Configure a frame sync structure.
Definition: framesync.c:119
StackContext::fillcolor
uint8_t fillcolor[4]
Definition: vf_stack.c:49
FFDrawColor
Definition: drawutils.h:49
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
StackContext::fillcolor_str
char * fillcolor_str
Definition: vf_stack.c:50
ff_framesync_uninit
void ff_framesync_uninit(FFFrameSync *fs)
Free all memory currently allocated.
Definition: framesync.c:285
out
FILE * out
Definition: movenc.c:54
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1018
av_pix_fmt_desc_get
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2564
av_parse_color
int av_parse_color(uint8_t *rgba_color, const char *color_string, int slen, void *log_ctx)
Put the RGBA values that correspond to color_string in rgba_color.
Definition: parseutils.c:354
ff_framesync_get_frame
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe, unsigned get)
Get the current frame in an input.
Definition: framesync.c:248
StackContext::fs
FFFrameSync fs
Definition: vf_stack.c:58
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_asprintf
char * av_asprintf(const char *fmt,...)
Definition: avstring.c:113
AVFILTER_DEFINE_CLASS_EXT
AVFILTER_DEFINE_CLASS_EXT(stack, "(h|v)stack", stack_options)
StackContext::color
FFDrawColor color
Definition: vf_stack.c:54
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:303
pixdesc.h
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: vf_stack.c:61
AVOption
AVOption.
Definition: opt.h:247
FILTER_QUERY_FUNC
#define FILTER_QUERY_FUNC(func)
Definition: internal.h:168
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:196
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:153
FFFrameSync
Frame sync structure.
Definition: framesync.h:146
EXT_INFINITY
@ EXT_INFINITY
Extend the frame to infinity.
Definition: framesync.h:75
video.h
StackContext::shortest
int shortest
Definition: vf_stack.c:45
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:317
av_image_copy_plane
void av_image_copy_plane(uint8_t *dst, int dst_linesize, const uint8_t *src, int src_linesize, int bytewidth, int height)
Copy image plane from src to dst.
Definition: imgutils.c:374
OFFSET
#define OFFSET(x)
Definition: vf_stack.c:380
formats.h
av_pix_fmt_count_planes
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2604
uninit
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_stack.c:365
EXT_STOP
@ EXT_STOP
Completely stop all streams with this one.
Definition: framesync.h:65
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:128
FFFrameSyncIn
Input stream structure.
Definition: framesync.h:81
outputs
static const AVFilterPad outputs[]
Definition: vf_stack.c:390
activate
static int activate(AVFilterContext *ctx)
Definition: vf_stack.c:374
StackContext::nb_planes
int nb_planes
Definition: vf_stack.c:48
FFFrameSyncIn::sync
unsigned sync
Synchronization level: frames on input at the highest sync level will generate output frame events.
Definition: framesync.h:139
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:110
AVRational::num
int num
Numerator.
Definition: rational.h:59
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:50
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
init
static av_cold int init(AVFilterContext *ctx)
Definition: vf_stack.c:75
av_cold
#define av_cold
Definition: attributes.h:90
ff_set_common_formats
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:699
width
#define width
av_image_fill_linesizes
int av_image_fill_linesizes(int linesizes[4], enum AVPixelFormat pix_fmt, int width)
Fill plane linesizes for an image with pixel format pix_fmt and width width.
Definition: imgutils.c:89
s
#define s(width, name)
Definition: cbs_vp9.c:257
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:51
StackItem::height
int height[4]
Definition: vf_stack.c:37
av_strtok
char * av_strtok(char *s, const char *delim, char **saveptr)
Split the string into several tokens which can be accessed by successive calls to av_strtok().
Definition: avstring.c:186
StackContext::is_vertical
int is_vertical
Definition: vf_stack.c:46
ctx
AVFormatContext * ctx
Definition: movenc.c:48
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:141
ff_draw_init
int ff_draw_init(FFDrawContext *draw, enum AVPixelFormat format, unsigned flags)
Init a draw context.
Definition: drawutils.c:82
process_frame
static int process_frame(FFFrameSync *fs)
Definition: vf_stack.c:151
arg
const char * arg
Definition: jacosubdec.c:67
config_output
static int config_output(AVFilterLink *outlink)
Definition: vf_stack.c:181
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:66
NULL
#define NULL
Definition: coverity.c:32
fs
#define fs(width, name, subs,...)
Definition: cbs_vp9.c:259
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
ff_append_inpad_free_name
int ff_append_inpad_free_name(AVFilterContext *f, AVFilterPad *p)
Definition: avfilter.c:144
parseutils.h
ff_vf_hstack
const AVFilter ff_vf_hstack
ff_vf_vstack
const AVFilter ff_vf_vstack
StackContext::items
StackItem * items
Definition: vf_stack.c:56
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:117
size
int size
Definition: twinvq_data.h:10344
av_make_q
static AVRational av_make_q(int num, int den)
Create an AVRational.
Definition: rational.h:71
ff_fill_rectangle
void ff_fill_rectangle(FFDrawContext *draw, FFDrawColor *color, uint8_t *dst[], int dst_linesize[], int dst_x, int dst_y, int w, int h)
Fill a rectangle with an uniform color.
Definition: drawutils.c:222
AV_PIX_FMT_FLAG_BITSTREAM
#define AV_PIX_FMT_FLAG_BITSTREAM
All values of a component are bit-wise packed end to end.
Definition: pixdesc.h:124
StackContext::is_horizontal
int is_horizontal
Definition: vf_stack.c:47
StackContext::draw
FFDrawContext draw
Definition: vf_stack.c:53
height
#define height
internal.h
AVFILTER_DEFINE_CLASS
#define AVFILTER_DEFINE_CLASS(fname)
Definition: internal.h:326
StackContext::desc
const AVPixFmtDescriptor * desc
Definition: vf_stack.c:42
layout
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel layout
Definition: filter_design.txt:18
ff_formats_pixdesc_filter
AVFilterFormats * ff_formats_pixdesc_filter(unsigned want, unsigned rej)
Construct a formats list containing all pixel formats with certain properties.
Definition: formats.c:457
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:271
FLAGS
#define FLAGS
Definition: vf_stack.c:381
ff_draw_supported_pixel_formats
AVFilterFormats * ff_draw_supported_pixel_formats(unsigned flags)
Return the list of pixel formats supported by the draw functions.
Definition: drawutils.c:635
ff_filter_get_nb_threads
int ff_filter_get_nb_threads(AVFilterContext *ctx)
Get number of threads for current filter instance.
Definition: avfilter.c:803
StackContext::layout
char * layout
Definition: vf_stack.c:44
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
FFDrawContext
Definition: drawutils.h:35
AVFilterPad::name
const char * name
Pad name.
Definition: internal.h:56
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Definition: mem.c:271
ff_draw_color
void ff_draw_color(FFDrawContext *draw, FFDrawColor *color, const uint8_t rgba[4])
Prepare a color.
Definition: drawutils.c:135
AVFilter
Filter definition.
Definition: avfilter.h:149
ret
ret
Definition: filter_design.txt:187
AVFilterPad::type
enum AVMediaType type
AVFilterPad type.
Definition: internal.h:61
ff_vf_xstack
const AVFilter ff_vf_xstack
ff_framesync_init
int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
Initialize a frame sync structure.
Definition: framesync.c:79
FFFrameSyncIn::before
enum FFFrameSyncExtMode before
Extrapolation mode for timestamps before the first frame.
Definition: framesync.h:86
StackItem::y
int y[4]
Definition: vf_stack.c:35
framesync.h
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Definition: opt.h:224
avfilter.h
StackItem::linesize
int linesize[4]
Definition: vf_stack.c:36
AVFilterContext
An instance of a filter.
Definition: avfilter.h:386
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:121
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:279
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
StackContext::fillcolor_enable
int fillcolor_enable
Definition: vf_stack.c:51
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
StackItem
Definition: vf_stack.c:34
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Definition: opt.h:241
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: internal.h:192
process_slice
static int process_slice(AVFilterContext *ctx, void *arg, int job, int nb_jobs)
Definition: vf_stack.c:128
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
StackContext
Definition: vf_stack.c:40
imgutils.h
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:52
AVFrame::linesize
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:334
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
FFFrameSyncIn::after
enum FFFrameSyncExtMode after
Extrapolation mode for timestamps after the last frame.
Definition: framesync.h:91
ff_framesync_activate
int ff_framesync_activate(FFFrameSync *fs)
Examine the frames in the filter's input and try to produce output.
Definition: framesync.c:336
avstring.h
AV_OPT_TYPE_STRING
@ AV_OPT_TYPE_STRING
Definition: opt.h:228
drawutils.h
ff_filter_execute
static av_always_inline int ff_filter_execute(AVFilterContext *ctx, avfilter_action_func *func, void *arg, int *ret, int nb_jobs)
Definition: internal.h:143
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:120
stack_options
static const AVOption stack_options[]
Definition: vf_stack.c:382
StackContext::nb_inputs
int nb_inputs
Definition: vf_stack.c:43
StackContext::frames
AVFrame ** frames
Definition: vf_stack.c:57