FFmpeg
vmaf_motion.h
/*
 * Copyright (c) 2017 Ronald S. Bultje <rsbultje@gmail.com>
 * Copyright (c) 2017 Ashish Pratap Singh <ashk43712@gmail.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef AVFILTER_VMAF_MOTION_H
#define AVFILTER_VMAF_MOTION_H

#include <stddef.h>
#include <stdint.h>
#include "video.h"

typedef struct VMAFMotionDSPContext {
    uint64_t (*sad)(const uint16_t *img1, const uint16_t *img2, int w, int h,
                    ptrdiff_t img1_stride, ptrdiff_t img2_stride);
    void (*convolution_x)(const uint16_t *filter, int filt_w, const uint16_t *src,
                          uint16_t *dst, int w, int h, ptrdiff_t src_stride,
                          ptrdiff_t dst_stride);
    void (*convolution_y)(const uint16_t *filter, int filt_w, const uint8_t *src,
                          uint16_t *dst, int w, int h, ptrdiff_t src_stride,
                          ptrdiff_t dst_stride);
} VMAFMotionDSPContext;

void ff_vmafmotion_init_x86(VMAFMotionDSPContext *dsp);

typedef struct VMAFMotionData {
    uint16_t filter[5];
    int width;
    int height;
    ptrdiff_t stride;
    uint16_t *blur_data[2 /* cur, prev */];
    uint16_t *temp_data;
    double motion_sum;
    uint64_t nb_frames;
    VMAFMotionDSPContext vmafdsp;
} VMAFMotionData;

int ff_vmafmotion_init(VMAFMotionData *data, int w, int h, enum AVPixelFormat fmt);
double ff_vmafmotion_process(VMAFMotionData *data, AVFrame *frame);
double ff_vmafmotion_uninit(VMAFMotionData *data);

#endif /* AVFILTER_VMAF_MOTION_H */
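The function pointers in VMAFMotionDSPContext are DSP hooks: the filter fills them with a plain-C routine and lets ff_vmafmotion_init_x86() swap in optimized versions where available. To illustrate what the sad hook is expected to compute, here is a minimal scalar sketch, not FFmpeg's actual implementation; it assumes the strides are expressed in uint16_t elements (the real convention is defined by vf_vmafmotion.c), and sad_c and main are names introduced only for this example.

    /* sad_sketch.c: standalone scalar reference for the sad() signature above. */
    #include <inttypes.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t sad_c(const uint16_t *img1, const uint16_t *img2, int w, int h,
                          ptrdiff_t img1_stride, ptrdiff_t img2_stride)
    {
        uint64_t sum = 0;

        for (int i = 0; i < h; i++) {
            for (int j = 0; j < w; j++) {
                int d = img1[j] - img2[j];
                sum += d < 0 ? -d : d;   /* absolute difference per sample */
            }
            /* advance both planes by one row (stride in elements, by assumption) */
            img1 += img1_stride;
            img2 += img2_stride;
        }
        return sum;
    }

    int main(void)
    {
        /* Two 4x2 "frames" that differ by 1 in every sample: SAD should be 8. */
        const uint16_t a[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
        const uint16_t b[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };

        printf("SAD = %" PRIu64 "\n", sad_c(a, b, 4, 2, 4, 4));
        return 0;
    }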
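The ff_vmafmotion_* declarations form the per-frame API used by the vmafmotion filter. The sketch below shows one plausible in-tree caller; it is an assumption-laden illustration, not code from vf_vmafmotion.c. It assumes ff_vmafmotion_init() fully initializes the struct and returns a negative error code on failure, that ff_vmafmotion_process() scores one frame while accumulating into motion_sum/nb_frames, and that ff_vmafmotion_uninit() frees the buffers and returns the average motion; score_motion and its parameters are hypothetical names.

    /* Hypothetical caller inside libavfilter (the ff_ symbols are internal,
     * not part of FFmpeg's public API). */
    #include "libavutil/frame.h"
    #include "libavutil/pixfmt.h"
    #include "vmaf_motion.h"

    static double score_motion(AVFrame **frames, int nb_frames,
                               int w, int h, enum AVPixelFormat fmt)
    {
        VMAFMotionData d;

        /* Allocate blur/temp buffers and pick DSP routines for this format. */
        if (ff_vmafmotion_init(&d, w, h, fmt) < 0)
            return -1.0;

        /* Each call returns a per-frame motion score; the running sum and
         * frame count are kept inside VMAFMotionData. */
        for (int i = 0; i < nb_frames; i++)
            ff_vmafmotion_process(&d, frames[i]);

        /* Release the buffers and return the average motion over all frames. */
        return ff_vmafmotion_uninit(&d);
    }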