FFmpeg
vf_pp7.c
/*
 * Copyright (c) 2005 Michael Niedermayer <michaelni@gmx.at>
 * Copyright (c) 2014 Arwa Arif <arwaarif1994@gmail.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

/**
 * @file
 * Postprocessing filter - 7
 *
 * Originally written by Michael Niedermayer for the MPlayer
 * project, and ported by Arwa Arif for FFmpeg.
 */
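
/*
 * Overview of the implementation: each output pixel is computed by running
 * a small separable DCT-like transform over the surrounding window
 * (dctA_c() for the vertical pass, dctB_c() for the horizontal pass),
 * thresholding the coefficients against QP-dependent thresholds (see
 * init_thres2() and the requantize callbacks below), and keeping only the
 * renormalized, dithered centre sample.
 */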

#include "libavutil/imgutils.h"
#include "libavutil/mem_internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "internal.h"
#include "qp_table.h"
#include "vf_pp7.h"

enum mode {
    MODE_HARD,
    MODE_SOFT,
    MODE_MEDIUM
};

#define OFFSET(x) offsetof(PP7Context, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
static const AVOption pp7_options[] = {
    { "qp", "force a constant quantizer parameter", OFFSET(qp), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 64, FLAGS },
    { "mode", "set thresholding mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64 = MODE_MEDIUM}, 0, 2, FLAGS, "mode" },
    { "hard",   "hard thresholding",   0, AV_OPT_TYPE_CONST, {.i64 = MODE_HARD},   INT_MIN, INT_MAX, FLAGS, "mode" },
    { "soft",   "soft thresholding",   0, AV_OPT_TYPE_CONST, {.i64 = MODE_SOFT},   INT_MIN, INT_MAX, FLAGS, "mode" },
    { "medium", "medium thresholding", 0, AV_OPT_TYPE_CONST, {.i64 = MODE_MEDIUM}, INT_MIN, INT_MAX, FLAGS, "mode" },
    { NULL }
};
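
/*
 * Example usage (illustrative):
 *   ffmpeg -i in.mp4 -vf pp7=qp=8:mode=soft out.mp4
 * forces quantizer 8 with soft thresholding; with qp=0 (the default) the
 * per-macroblock QP values exported by the decoder are used instead.
 */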

AVFILTER_DEFINE_CLASS(pp7);

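/*
 * 8x8 ordered-dither matrix (values 0..63). filter() adds dither[y&7][x&7]
 * to the requantized value before the final ">> 6", so the rounding error
 * is spread spatially instead of truncating every pixel the same way.
 */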
DECLARE_ALIGNED(8, static const uint8_t, dither)[8][8] = {
    {  0, 48, 12, 60,  3, 51, 15, 63, },
    { 32, 16, 44, 28, 35, 19, 47, 31, },
    {  8, 56,  4, 52, 11, 59,  7, 55, },
    { 40, 24, 36, 20, 43, 27, 39, 23, },
    {  2, 50, 14, 62,  1, 49, 13, 61, },
    { 34, 18, 46, 30, 33, 17, 45, 29, },
    { 10, 58,  6, 54,  9, 57,  5, 53, },
    { 42, 26, 38, 22, 41, 25, 37, 21, },
};

#define N0 4
#define N1 5
#define N2 10
#define SN0 2
#define SN1 2.2360679775
#define SN2 3.16227766017
#define N (1 << 16)

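/*
 * Per-coefficient weights for the requantize functions, indexed as a 4x4
 * block in row-major order: factor[i] = N / (row_norm * col_norm), with the
 * norms taken from the pattern {N0, N1, N0, N2}. SN0/SN1/SN2 are the square
 * roots of N0/N1/N2 (only SN0 and SN2 are used, in init_thres2()). The
 * scaling is chosen so that, together with the ">> 12" in the requantize
 * functions and the ">> 6" in filter(), a flat block passes through
 * unchanged.
 */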
static const int factor[16] = {
    N / (N0 * N0), N / (N0 * N1), N / (N0 * N0), N / (N0 * N2),
    N / (N1 * N0), N / (N1 * N1), N / (N1 * N0), N / (N1 * N2),
    N / (N0 * N0), N / (N0 * N1), N / (N0 * N0), N / (N0 * N2),
    N / (N2 * N0), N / (N2 * N1), N / (N2 * N0), N / (N2 * N2),
};

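/*
 * Precompute the per-QP, per-coefficient thresholds used by the requantize
 * callbacks. thres2[qp][i] grows linearly with the quantizer and is scaled
 * by a product of SN0/SN2 selected from the coefficient's position bits, so
 * coefficients with larger normalization constants get larger thresholds.
 */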
static void init_thres2(PP7Context *p)
{
    int qp, i;
    int bias = 0; //FIXME

    for (qp = 0; qp < 99; qp++) {
        for (i = 0; i < 16; i++) {
            p->thres2[qp][i] = ((i&1) ? SN2 : SN0) * ((i&4) ? SN2 : SN0) * FFMAX(1, qp) * (1<<2) - 1 - bias;
        }
    }
}

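/*
 * Vertical pass of the transform: for each of 4 consecutive columns,
 * combine 7 vertically adjacent samples (src[0] .. src[6*stride]) into 4
 * coefficients, stored consecutively in dst (4 values per column).
 */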
static inline void dctA_c(int16_t *dst, uint8_t *src, int stride)
{
    int i;

    for (i = 0; i < 4; i++) {
        int s0 = src[0 * stride] + src[6 * stride];
        int s1 = src[1 * stride] + src[5 * stride];
        int s2 = src[2 * stride] + src[4 * stride];
        int s3 = src[3 * stride];
        int s = s3 + s3;
        s3 = s - s0;
        s0 = s + s0;
        s  = s2 + s1;
        s2 = s2 - s1;
        dst[0] = s0 + s;
        dst[2] = s0 - s;
        dst[1] = 2 * s3 + s2;
        dst[3] = s3 - 2 * s2;
        src++;
        dst += 4;
    }
}

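/*
 * Horizontal pass: src holds the per-column outputs of dctA_c at a stride
 * of 4; seven such columns are combined into a 4x4 block of coefficients
 * in dst. This is installed as PP7Context.dctB so that ff_pp7_init_x86()
 * can replace it with an optimized version where available.
 */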
static void dctB_c(int16_t *dst, int16_t *src)
{
    int i;

    for (i = 0; i < 4; i++) {
        int s0 = src[0 * 4] + src[6 * 4];
        int s1 = src[1 * 4] + src[5 * 4];
        int s2 = src[2 * 4] + src[4 * 4];
        int s3 = src[3 * 4];
        int s = s3 + s3;
        s3 = s - s0;
        s0 = s + s0;
        s  = s2 + s1;
        s2 = s2 - s1;
        dst[0 * 4] = s0 + s;
        dst[2 * 4] = s0 - s;
        dst[1 * 4] = 2 * s3 + s2;
        dst[3 * 4] = s3 - 2 * s2;
        src++;
        dst++;
    }
}

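/*
 * Hard thresholding: a coefficient is kept at full value if its magnitude
 * exceeds the threshold for its position and dropped otherwise; the DC
 * coefficient (src[0]) is always kept. Returns the weighted, rounded sum.
 * One of the requantize() implementations, selected with mode=hard.
 */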
static int hardthresh_c(PP7Context *p, int16_t *src, int qp)
{
    int i;
    int a;

    a = src[0] * factor[0];
    for (i = 1; i < 16; i++) {
        unsigned int threshold1 = p->thres2[qp][i];
        unsigned int threshold2 = threshold1 << 1;
        int level = src[i];
        if (((unsigned)(level + threshold1)) > threshold2)
            a += level * factor[i];
    }
    return (a + (1 << 11)) >> 12;
}

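/*
 * Medium thresholding: coefficients above twice the threshold are kept as
 * is, coefficients below the threshold are dropped, and the range in
 * between is remapped to 2 * (|level| - threshold) so there is no hard
 * step at the threshold. Selected with mode=medium (the default).
 */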
static int mediumthresh_c(PP7Context *p, int16_t *src, int qp)
{
    int i;
    int a;

    a = src[0] * factor[0];
    for (i = 1; i < 16; i++) {
        unsigned int threshold1 = p->thres2[qp][i];
        unsigned int threshold2 = threshold1 << 1;
        int level = src[i];
        if (((unsigned)(level + threshold1)) > threshold2) {
            if (((unsigned)(level + 2 * threshold1)) > 2 * threshold2)
                a += level * factor[i];
            else {
                if (level > 0)
                    a += 2 * (level - (int)threshold1) * factor[i];
                else
                    a += 2 * (level + (int)threshold1) * factor[i];
            }
        }
    }
    return (a + (1 << 11)) >> 12;
}

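/*
 * Soft thresholding: coefficients whose magnitude exceeds the threshold are
 * shrunk towards zero by the threshold amount; everything else is dropped.
 * Selected with mode=soft.
 */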
static int softthresh_c(PP7Context *p, int16_t *src, int qp)
{
    int i;
    int a;

    a = src[0] * factor[0];
    for (i = 1; i < 16; i++) {
        unsigned int threshold1 = p->thres2[qp][i];
        unsigned int threshold2 = threshold1 << 1;
        int level = src[i];
        if (((unsigned)(level + threshold1)) > threshold2) {
            if (level > 0)
                a += (level - (int)threshold1) * factor[i];
            else
                a += (level + (int)threshold1) * factor[i];
        }
    }
    return (a + (1 << 11)) >> 12;
}

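/*
 * Filter one plane. The source plane is copied into the padded work buffer
 * p->src with an 8-pixel mirrored border on every side; then, for every
 * output pixel, the transform of the surrounding window is evaluated (the
 * vertical pass is cached per group of 4 columns), thresholded with the QP
 * taken either from the fixed "qp" option or from the per-macroblock QP
 * table, and the dithered, clipped result is written to dst.
 */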
static void filter(PP7Context *p, uint8_t *dst, uint8_t *src,
                   int dst_stride, int src_stride,
                   int width, int height,
                   uint8_t *qp_store, int qp_stride, int is_luma)
{
    int x, y;
    const int stride = is_luma ? p->temp_stride : ((width + 16 + 15) & (~15));
    uint8_t *p_src = p->src + 8 * stride;
    int16_t *block = (int16_t *)p->src;
    int16_t *temp  = (int16_t *)(p->src + 32);

    if (!src || !dst) return;
    for (y = 0; y < height; y++) {
        int index = 8 + 8 * stride + y * stride;
        memcpy(p_src + index, src + y * src_stride, width);
        for (x = 0; x < 8; x++) {
            p_src[index         - x - 1] = p_src[index +         x    ];
            p_src[index + width + x    ] = p_src[index + width - x - 1];
        }
    }
    for (y = 0; y < 8; y++) {
        memcpy(p_src + (7 - y) * stride, p_src + (y + 8) * stride, stride);
        memcpy(p_src + (height + 8 + y) * stride, p_src + (height - y + 7) * stride, stride);
    }
    //FIXME (try edge emu)

    for (y = 0; y < height; y++) {
        for (x = -8; x < 0; x += 4) {
            const int index = x + y * stride + (8 - 3) * (1 + stride) + 8; //FIXME silly offset
            uint8_t *src = p_src + index;
            int16_t *tp = temp + 4 * x;

            dctA_c(tp + 4 * 8, src, stride);
        }
        for (x = 0; x < width; ) {
            const int qps = 3 + is_luma;
            int qp;
            int end = FFMIN(x + 8, width);

            if (p->qp)
                qp = p->qp;
            else {
                qp = qp_store[(FFMIN(x, width - 1) >> qps) + (FFMIN(y, height - 1) >> qps) * qp_stride];
                qp = ff_norm_qscale(qp, p->qscale_type);
            }
            for (; x < end; x++) {
                const int index = x + y * stride + (8 - 3) * (1 + stride) + 8; //FIXME silly offset
                uint8_t *src = p_src + index;
                int16_t *tp = temp + 4 * x;
                int v;

                if ((x & 3) == 0)
                    dctA_c(tp + 4 * 8, src, stride);

                p->dctB(block, tp);

                v = p->requantize(p, block, qp);
                v = (v + dither[y & 7][x & 7]) >> 6;
                if ((unsigned)v > 255)
                    v = (-v) >> 31;
                dst[x + y * dst_stride] = v;
            }
        }
    }
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,
        AV_PIX_FMT_YUV420P,  AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,
        AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,
        AV_PIX_FMT_GBRP,     AV_PIX_FMT_GRAY8,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

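/*
 * Per-link setup: read the chroma subsampling from the pixel format, size
 * and allocate the padded work buffer (16 extra columns and rows plus
 * slack), precompute the thresholds, pick the requantize callback for the
 * selected mode, and let the x86 code install optimized routines.
 */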
static int config_input(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->dst;
    PP7Context *pp7 = ctx->priv;
    const int h = FFALIGN(inlink->h + 16, 16);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);

    pp7->hsub = desc->log2_chroma_w;
    pp7->vsub = desc->log2_chroma_h;

    pp7->temp_stride = FFALIGN(inlink->w + 16, 16);
    pp7->src = av_malloc_array(pp7->temp_stride, (h + 8) * sizeof(uint8_t));

    if (!pp7->src)
        return AVERROR(ENOMEM);

    init_thres2(pp7);

    switch (pp7->mode) {
    case 0: pp7->requantize = hardthresh_c;   break;
    case 1: pp7->requantize = softthresh_c;   break;
    default:
    case 2: pp7->requantize = mediumthresh_c; break;
    }

    pp7->dctB = dctB_c;

    if (ARCH_X86)
        ff_pp7_init_x86(pp7);

    return 0;
}

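/*
 * Per-frame entry point: extract the QP table exported by the decoder
 * (unless a fixed qp was forced), filter the luma and the two chroma
 * planes, and pass the (possibly newly allocated) frame downstream.
 */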
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *ctx = inlink->dst;
    PP7Context *pp7 = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;

    int qp_stride = 0;
    int8_t *qp_table = NULL;

    if (!pp7->qp) {
        int ret = ff_qp_table_extract(in, &qp_table, &qp_stride, NULL, &pp7->qscale_type);
        if (ret < 0) {
            av_frame_free(&in);
            return ret;
        }
    }

    if (!ctx->is_disabled) {
        const int cw = AV_CEIL_RSHIFT(inlink->w, pp7->hsub);
        const int ch = AV_CEIL_RSHIFT(inlink->h, pp7->vsub);

        /* get a new frame if in-place is not possible or if the dimensions
         * are not multiple of 8 */
        if (!av_frame_is_writable(in) || (inlink->w & 7) || (inlink->h & 7)) {
            const int aligned_w = FFALIGN(inlink->w, 8);
            const int aligned_h = FFALIGN(inlink->h, 8);

            out = ff_get_video_buffer(outlink, aligned_w, aligned_h);
            if (!out) {
                av_frame_free(&in);
                av_freep(&qp_table);
                return AVERROR(ENOMEM);
            }
            av_frame_copy_props(out, in);
            out->width  = in->width;
            out->height = in->height;
        }

        if (qp_table || pp7->qp) {

            filter(pp7, out->data[0], in->data[0], out->linesize[0], in->linesize[0],
                   inlink->w, inlink->h, qp_table, qp_stride, 1);
            filter(pp7, out->data[1], in->data[1], out->linesize[1], in->linesize[1],
                   cw, ch, qp_table, qp_stride, 0);
            filter(pp7, out->data[2], in->data[2], out->linesize[2], in->linesize[2],
                   cw, ch, qp_table, qp_stride, 0);
            emms_c();
        }
    }

    if (in != out) {
        if (in->data[3])
            av_image_copy_plane(out->data[3], out->linesize[3],
                                in ->data[3], in ->linesize[3],
                                inlink->w, inlink->h);
        av_frame_free(&in);
    }
    av_freep(&qp_table);
    return ff_filter_frame(outlink, out);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    PP7Context *pp7 = ctx->priv;
    av_freep(&pp7->src);
}

static const AVFilterPad pp7_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad pp7_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

const AVFilter ff_vf_pp7 = {
    .name          = "pp7",
    .description   = NULL_IF_CONFIG_SMALL("Apply Postprocessing 7 filter."),
    .priv_size     = sizeof(PP7Context),
    .uninit        = uninit,
    .query_formats = query_formats,
    .inputs        = pp7_inputs,
    .outputs       = pp7_outputs,
    .priv_class    = &pp7_class,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
};