21 #include "config_components.h"
40 16384, 16320, 16257, 16194, 16132, 16070, 16009, 15948, 15888, 15828, 15768,
41 15709, 15650, 15592, 15534, 15477, 15420, 15364, 15308, 15252, 15197, 15142,
42 15087, 15033, 14980, 14926, 14873, 14821, 14769, 14717, 14665, 14614, 14564,
43 14513, 14463, 14413, 14364, 14315, 14266, 14218, 14170, 14122, 14075, 14028,
44 13981, 13935, 13888, 13843, 13797, 13752, 13707, 13662, 13618, 13574, 13530,
45 13487, 13443, 13400, 13358, 13315, 13273, 13231, 13190, 13148, 13107, 13066,
46 13026, 12985, 12945, 12906, 12866, 12827, 12788, 12749, 12710, 12672, 12633,
47 12596, 12558, 12520, 12483, 12446, 12409, 12373, 12336, 12300, 12264, 12228,
48 12193, 12157, 12122, 12087, 12053, 12018, 11984, 11950, 11916, 11882, 11848,
49 11815, 11782, 11749, 11716, 11683, 11651, 11619, 11586, 11555, 11523, 11491,
50 11460, 11429, 11398, 11367, 11336, 11305, 11275, 11245, 11215, 11185, 11155,
51 11125, 11096, 11067, 11038, 11009, 10980, 10951, 10923, 10894, 10866, 10838,
52 10810, 10782, 10755, 10727, 10700, 10673, 10645, 10618, 10592, 10565, 10538,
53 10512, 10486, 10460, 10434, 10408, 10382, 10356, 10331, 10305, 10280, 10255,
54 10230, 10205, 10180, 10156, 10131, 10107, 10082, 10058, 10034, 10010, 9986,
55 9963, 9939, 9916, 9892, 9869, 9846, 9823, 9800, 9777, 9754, 9732,
56 9709, 9687, 9664, 9642, 9620, 9598, 9576, 9554, 9533, 9511, 9489,
57 9468, 9447, 9425, 9404, 9383, 9362, 9341, 9321, 9300, 9279, 9259,
58 9239, 9218, 9198, 9178, 9158, 9138, 9118, 9098, 9079, 9059, 9039,
59 9020, 9001, 8981, 8962, 8943, 8924, 8905, 8886, 8867, 8849, 8830,
60 8812, 8793, 8775, 8756, 8738, 8720, 8702, 8684, 8666, 8648, 8630,
61 8613, 8595, 8577, 8560, 8542, 8525, 8508, 8490, 8473, 8456, 8439,
62 8422, 8405, 8389, 8372, 8355, 8339, 8322, 8306, 8289, 8273, 8257,
63 8240, 8224, 8208, 8192
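The table above is the 257-entry div_lut[] consumed by resolve_divisor() further down; per the note in the symbol list it mirrors the Div_Lut table of spec section 7.11.3.7. A quick self-contained check, assuming each entry i equals round(2^22 / (256 + i)), i.e. a Q14 reciprocal of a divisor normalized into [256, 512]; approx_div_lut() is a hypothetical helper for illustration, not part of av1dec.c:

#include <assert.h>
#include <stdint.h>

static uint16_t approx_div_lut(int i)
{
    uint32_t den = 256 + i;                          /* normalized divisor */
    return (uint16_t)(((1u << 22) + den / 2) / den); /* round(2^22 / den)  */
}

int main(void)
{
    assert(approx_div_lut(0)   == 16384);  /* first entry at line 40 */
    assert(approx_div_lut(1)   == 16320);
    assert(approx_div_lut(255) == 8208);
    assert(approx_div_lut(256) == 8192);   /* last entry at line 63  */
    return 0;
}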
71 return r - ((v + 1) >> 1);
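Line 71 is only the odd-v branch of inverse_recenter() (its prototype appears in the symbol list below). A sketch of the complete helper, following the inverse_recenter() definition in the AV1 spec rather than the elided lines of this file:

static uint32_t inverse_recenter(int r, uint32_t v)
{
    if (v > 2 * r)               /* offset already outside the symmetric window around r */
        return v;
    else if (v & 1)              /* odd offsets map below the reference value r */
        return r - ((v + 1) >> 1);
    else                         /* even offsets map above the reference value r */
        return r + (v >> 1);
}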
95 uint8_t primary_frame, prev_frame;
96 uint32_t abs_bits, prec_bits, round, prec_diff, sub, mx;
99 primary_frame = s->raw_frame_header->primary_ref_frame;
100 prev_frame = s->raw_frame_header->ref_frame_idx[primary_frame];
109 prev_gm_param = s->cur_frame.gm_params[ref][idx];
111 prev_gm_param = s->ref[prev_frame].gm_params[ref][idx];
116 !s->raw_frame_header->allow_high_precision_mv;
118 !s->raw_frame_header->allow_high_precision_mv;
126 sub = (idx % 3) == 2 ? (1 << prec_bits) : 0;
128 r = (prev_gm_param >> prec_diff) - sub;
130 s->cur_frame.gm_params[ref][idx] =
132 -mx, mx + 1, r) << prec_diff) + round;
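The assignment at lines 130-132 runs the CBS-parsed value through decode_signed_subexp_with_ref() (prototype in the symbol list below) and then restores the precision shift and rounding offset computed above. A sketch of how the signed variant is expected to wrap the unsigned one, following the AV1 spec's decode_signed_subexp_with_ref() and assuming the decode_unsigned_subexp_with_ref() whose prototype also appears below:

/* Sketch only: shift the target range [low, high) down to start at zero,
 * decode the offset relative to the recentered reference, then shift back. */
static int32_t decode_signed_subexp_with_ref(uint32_t sub_exp, int low, int high, int r)
{
    int32_t x = decode_unsigned_subexp_with_ref(sub_exp, high - low, r - low);
    return x + low;
}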
139 return ((x + ((uint64_t)1 << (n - 1))) >> n);
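round_two() at line 139 is a rounding right shift: adding 2^(n-1) before shifting rounds to nearest instead of truncating. For example, round_two(100, 3) = (100 + 4) >> 3 = 13, whereas a plain 100 >> 3 gives 12; round_two_signed() (see the symbol list) presumably applies the same rounding to the magnitude of negative inputs.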
156 e = d - (1 << (*shift));
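Line 156 sits inside resolve_divisor(), which turns a divisor d into a div_lut[] reciprocal plus a shift the caller applies afterwards. A sketch of the surrounding function, following spec section 7.11.3.7; the literals 8 and 14 stand in for the DIV_LUT_BITS / DIV_LUT_PREC_BITS constants, which the real code presumably names via macros:

static int16_t resolve_divisor_sketch(uint32_t d, uint16_t *shift)
{
    uint32_t e, f;

    *shift = av_log2(d);              /* floor(log2(d)) */
    e = d - (1 << (*shift));          /* remainder below the top bit of d */
    if (*shift > 8)
        f = round_two(e, *shift - 8); /* scale the remainder down to 8 bits */
    else
        f = e << (8 - *shift);        /* or up to 8 bits */
    *shift += 14;                     /* div_lut[] entries are Q14 reciprocals */
    return div_lut[f];                /* f is in [0, 256] */
}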
173 int16_t alpha, beta, gamma, delta, divf, divs;
175 int32_t *param = &s->cur_frame.gm_params[idx][0];
183 w = (int64_t)param[3] * param[4];
210 for (int i = 0; i < 6; i++)
211 s->cur_frame.gm_params[ref][i] = (i % 3 == 2) ?
238 s->cur_frame.gm_params[ref][4] = -s->cur_frame.gm_params[ref][3];
239 s->cur_frame.gm_params[ref][5] = s->cur_frame.gm_params[ref][2];
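Lines 238-239 complete a ROTZOOM model: with the warp written as x' = p2*x + p3*y + p0, y' = p4*x + p5*y + p1, the AV1 spec constrains the 2x2 block of a ROTZOOM warp to a scaled rotation [[p2, p3], [-p3, p2]], so only p2 and p3 are coded and the second row is derived as p4 = -p3 and p5 = p2.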
253 unsigned int a, unsigned int b)
255 unsigned int diff = a - b;
257 return (diff & (m - 1)) - (diff & m);
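get_relative_dist() converts the wrapped difference of two order hints into a signed distance: (diff & (m - 1)) keeps the low bits and subtracting (diff & m) sign-extends the result around the wrap point. As a worked example, assuming 8-bit order hints (m = 1 << 7 = 128): a = 2, b = 250 gives diff = 8 modulo 256, and (8 & 127) - (8 & 128) = 8, so a is 8 pictures after b despite the counter having wrapped; swapping the arguments gives diff = 248 and (248 & 127) - (248 & 128) = 120 - 128 = -8.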
265 int forward_idx, backward_idx;
266 int forward_hint, backward_hint;
267 int second_forward_idx, second_forward_hint;
268 int ref_hint, dist, i;
270 if (!header->skip_mode_present)
276 ref_hint = s->ref[header->ref_frame_idx[i]].raw_frame_header->order_hint;
279 if (forward_idx < 0 ||
282 forward_hint = ref_hint;
284 } else if (dist > 0) {
285 if (backward_idx < 0 ||
288 backward_hint = ref_hint;
293 if (forward_idx < 0) {
295 } else if (backward_idx >= 0) {
296 s->cur_frame.skip_mode_frame_idx[0] =
298 s->cur_frame.skip_mode_frame_idx[1] =
303 second_forward_idx = -1;
305 ref_hint = s->ref[header->ref_frame_idx[i]].raw_frame_header->order_hint;
307 if (second_forward_idx < 0 ||
309 second_forward_idx = i;
310 second_forward_hint = ref_hint;
315 if (second_forward_idx < 0)
318 s->cur_frame.skip_mode_frame_idx[0] =
320 s->cur_frame.skip_mode_frame_idx[1] =
332 s->cur_frame.coded_lossless = 0;
336 s->cur_frame.coded_lossless = 1;
340 qindex = (header->base_q_idx +
343 qindex = header->base_q_idx;
348 s->cur_frame.coded_lossless = 0;
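Lines 332-348 derive the effective quantizer index per segment: when the segmentation Q feature is active, the segment delta is added to base_q_idx (line 340, presumably clipped to the valid 0-255 range), otherwise base_q_idx is used directly (line 343). Per the AV1 spec a frame is coded lossless only if every segment ends up with qindex 0 and all DC/AC delta-Q values are 0, so the loop clears coded_lossless (line 348) as soon as any segment's qindex is non-zero; for example base_q_idx = 0 with a segment delta of +4 yields qindex 4, and the frame is not coded lossless.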
364 memcpy(dst, film_grain, sizeof(*dst));
370 memcpy(dst, src, sizeof(*dst));
378 s->raw_frame_header->tile_cols * s->raw_frame_header->tile_rows;
379 if (s->tile_num < cur_tile_num) {
387 s->tile_num = cur_tile_num;
396 uint16_t tile_num, tile_row, tile_col;
397 uint32_t size = 0, size_bytes = 0;
402 s->tg_end = tile_group->tg_end;
404 for (tile_num = tile_group->tg_start; tile_num <= tile_group->tg_end; tile_num++) {
405 tile_row = tile_num / s->raw_frame_header->tile_cols;
406 tile_col = tile_num % s->raw_frame_header->tile_cols;
408 if (tile_num == tile_group->tg_end) {
411 s->tile_group_info[tile_num].tile_row = tile_row;
412 s->tile_group_info[tile_num].tile_column = tile_col;
415 size_bytes = s->raw_frame_header->tile_size_bytes_minus1 + 1;
419 for (int i = 0; i < size_bytes; i++)
420 size |= bytestream2_get_byteu(&gb) << 8 * i;
425 s->tile_group_info[tile_num].tile_size = size;
427 s->tile_group_info[tile_num].tile_row = tile_row;
428 s->tile_group_info[tile_num].tile_column = tile_col;
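In the explicit-size branch (lines 415-425) the tile size is read as a little-endian integer of tile_size_bytes_minus1 + 1 bytes; since the coded field is tile_size_minus_1, the value stored at line 425 is presumably the accumulated integer plus one. A self-contained sketch with made-up input bytes, mirroring the loop at lines 419-420:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    const uint8_t payload[] = { 0x34, 0x12 };      /* hypothetical 2-byte size field */
    uint32_t size = 0;
    for (int i = 0; i < 2; i++)
        size |= (uint32_t)payload[i] << 8 * i;     /* little-endian accumulate */
    printf("tile_size_minus_1 = %u -> tile size = %u bytes\n", size, size + 1);
    return 0;                                      /* prints 4660 -> 4661 */
}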
444 #define HWACCEL_MAX (CONFIG_AV1_DXVA2_HWACCEL + \
445 CONFIG_AV1_D3D11VA_HWACCEL * 2 + \
446 CONFIG_AV1_NVDEC_HWACCEL + \
447 CONFIG_AV1_VAAPI_HWACCEL + \
448 CONFIG_AV1_VDPAU_HWACCEL)
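HWACCEL_MAX counts how many hardware pixel formats can be offered at most, one per compiled-in hwaccel, with D3D11VA counted twice, presumably because it can expose both AV_PIX_FMT_D3D11VA_VLD and AV_PIX_FMT_D3D11; get_pixel_format() presumably uses it to size its candidate format list before the software format is appended.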
513 #if CONFIG_AV1_DXVA2_HWACCEL
516 #if CONFIG_AV1_D3D11VA_HWACCEL
520 #if CONFIG_AV1_NVDEC_HWACCEL
523 #if CONFIG_AV1_VAAPI_HWACCEL
526 #if CONFIG_AV1_VDPAU_HWACCEL
531 #if CONFIG_AV1_DXVA2_HWACCEL
534 #if CONFIG_AV1_D3D11VA_HWACCEL
538 #if CONFIG_AV1_NVDEC_HWACCEL
541 #if CONFIG_AV1_VAAPI_HWACCEL
544 #if CONFIG_AV1_VDPAU_HWACCEL
549 #if CONFIG_AV1_NVDEC_HWACCEL
554 #if CONFIG_AV1_NVDEC_HWACCEL
574 " hardware accelerated AV1 decoding.\n");
588 f->hwaccel_picture_private = NULL;
590 f->raw_frame_header = NULL;
591 f->spatial_id = f->temporal_id = 0;
592 memset(f->skip_mode_frame_idx, 0,
593 2 * sizeof(uint8_t));
594 memset(&f->film_grain, 0, sizeof(f->film_grain));
595 f->coded_lossless = 0;
615 if (src->hwaccel_picture_private) {
634 src->skip_mode_frame_idx,
635 2 * sizeof(uint8_t));
666 while (s->itut_t35_fifo && av_fifo_read(s->itut_t35_fifo, &itut_t35, 1) >= 0)
731 int r_width = header->render_width_minus_1 + 1;
732 int r_height = header->render_height_minus_1 + 1;
742 (int64_t)height * r_width,
743 (int64_t)width * r_height,
778 "Failed to allocate reference frame buffer %d.\n", i);
784 if (!s->cur_frame.f) {
786 "Failed to allocate current frame buffer.\n");
799 if (!s->itut_t35_fifo)
802 av_opt_set_int(s->cbc->priv_data, "operating_point", s->operating_point, 0);
851 switch (header->frame_type) {
867 f->hwaccel_priv_buf =
869 if (!f->hwaccel_priv_buf) {
873 f->hwaccel_picture_private = f->hwaccel_priv_buf->data;
887 int ret, provider_code;
891 provider_code = bytestream2_get_be16(&gb);
892 switch (provider_code) {
894 uint32_t user_identifier = bytestream2_get_be32(&gb);
895 switch (user_identifier) {
896 case MKBETAG('G', 'A', '9', '4'): {
918 int provider_oriented_code = bytestream2_get_be16(&gb);
919 int application_identifier = bytestream2_get_byte(&gb);
922 provider_oriented_code != 1 || application_identifier != 4)
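The provider dispatch above appears to cover two ITU-T T.35 payload families: the MKBETAG('G', 'A', '9', '4') user identifier marks ATSC A/53 closed captions (handed on via ff_parse_a53_cc()), while the branch requiring provider_oriented_code == 1 and application_identifier == 4 matches SMPTE ST 2094-40 (HDR10+) dynamic metadata, consistent with the HDR metadata types referenced elsewhere in this listing.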
953 for (int i = 0; i < 3; i++) {
972 light->MaxCLL = s->cll->max_cll;
1027 for (int i = 0; i < 24; i++) {
1030 for (int i = 0; i < 25; i++) {
1049 const AVFrame *srcframe = s->cur_frame.f;
1053 if (s->operating_point_idc &&
1054 av_log2(s->operating_point_idc >> 8) > s->cur_frame.spatial_id)
1077 #if FF_API_FRAME_PKT
1095 if (header->refresh_frame_flags & (1 << i)) {
1099 "Failed to update frame %d in reference list\n", i);
1115 if (!s->cur_frame.header_ref)
1118 s->cur_frame.raw_frame_header = s->raw_frame_header;
1137 "Failed to allocate space for current frame.\n");
1162 s->current_obu.nb_units);
1164 for (int i = 0; i < s->current_obu.nb_units; i++) {
1175 switch (unit->type) {
1193 s->operating_point_idc = s->raw_seq->operating_point_idc[s->operating_point];
1199 "Failed to get pixel format.\n");
1216 if (s->raw_frame_header)
1229 if (!s->header_ref) {
1239 if (s->raw_frame_header->show_existing_frame) {
1243 &s->ref[s->raw_frame_header->frame_to_show_map_idx]);
1255 if (s->cur_frame.f->buf[0]) {
1261 s->raw_frame_header = NULL;
1272 s->cur_frame.spatial_id = header->spatial_id;
1273 s->cur_frame.temporal_id = header->temporal_id;
1275 if (avctx->hwaccel && s->cur_frame.f->buf[0]) {
1287 if (!s->raw_frame_header) {
1302 if (avctx->hwaccel && s->cur_frame.f->buf[0]) {
1308 "HW accel decode slice fail.\n");
1364 if (raw_tile_group && (s->tile_num == raw_tile_group->tg_end + 1)) {
1365 if (avctx->hwaccel && s->cur_frame.f->buf[0]) {
1379 if (s->raw_frame_header->show_frame && s->cur_frame.f->buf[0]) {
1386 raw_tile_group = NULL;
1387 s->raw_frame_header = NULL;
1394 s->raw_frame_header = NULL;
1407 s->operating_point_idc = 0;
1408 s->raw_frame_header = NULL;
1418 #define OFFSET(x) offsetof(AV1DecContext, x)
1419 #define VD AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
1421 { "operating_point", "Select an operating point of the scalable bitstream",
1448 .bsfs = "av1_frame_split",
1450 #if CONFIG_AV1_DXVA2_HWACCEL
1453 #if CONFIG_AV1_D3D11VA_HWACCEL
1456 #if CONFIG_AV1_D3D11VA2_HWACCEL
1459 #if CONFIG_AV1_NVDEC_HWACCEL
1462 #if CONFIG_AV1_VAAPI_HWACCEL
1465 #if CONFIG_AV1_VDPAU_HWACCEL
uint32_t num_units_in_display_tick
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
#define FF_ENABLE_DEPRECATION_WARNINGS
static void bit_depth(AudioStatsContext *s, uint64_t mask, uint64_t imask, AVRational *depth)
#define AV_LOG_WARNING
Something somehow does not look correct.
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
AVPixelFormat
Pixel format.
@ AV1_METADATA_TYPE_HDR_MDCV
static av_cold int av1_decode_init(AVCodecContext *avctx)
@ AV1_OBU_REDUNDANT_FRAME_HEADER
#define FF_CODEC_CAP_INIT_CLEANUP
The codec allows calling the close function for deallocation even if the init function returned a fai...
uint8_t clip_to_restricted_range
enum AVColorSpace colorspace
YUV colorspace type.
#define ff_thread_get_format
static void coded_lossless_param(AV1DecContext *s)
static float sub(float src0, float src1)
av_cold void ff_cbs_fragment_free(CodedBitstreamFragment *frag)
Free the units array of a fragment in addition to what ff_cbs_fragment_reset does.
static int av1_frame_alloc(AVCodecContext *avctx, AV1Frame *f)
uint8_t * data
The data buffer.
@ AV_FRAME_DATA_A53_CC
ATSC A53 Part 4 Closed Captions.
uint8_t point_cb_value[10]
void * content
Pointer to the decomposed form of this unit.
static av_always_inline void bytestream2_skipu(GetByteContext *g, unsigned int size)
uint8_t uv_points[2][10][2]
static int get_current_frame(AVCodecContext *avctx)
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
unsigned MaxCLL
Max content light level (cd/m^2).
This structure describes decoded (raw) audio or video data.
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
@ AV1_GM_ABS_TRANS_ONLY_BITS
static int set_context_with_sequence(AVCodecContext *avctx, const AV1RawSequenceHeader *seq)
#define HWACCEL_DXVA2(codec)
uint8_t point_y_value[14]
@ AVCOL_RANGE_JPEG
Full range content.
@ AV1_METADATA_TYPE_ITUT_T35
void ff_cbs_fragment_reset(CodedBitstreamFragment *frag)
Free the units contained in a fragment as well as the fragment's own data buffer, but not the units a...
AV1RawFrameHeader * raw_frame_header
#define HWACCEL_D3D11VA2(codec)
#define AV_PIX_FMT_YUV420P10
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
@ AV1_WARP_PARAM_REDUCE_BITS
CodedBitstreamUnitType type
Codec-specific type of this unit.
static const CodedBitstreamUnitType decompose_unit_types[]
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
@ AV1_OBU_TEMPORAL_DELIMITER
static int get_relative_dist(const AV1RawSequenceHeader *seq, unsigned int a, unsigned int b)
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
@ AV1_GM_TRANS_ONLY_PREC_BITS
uint64_t seed
Seed to use for the synthesis process, if the codec allows for it.
Coded bitstream unit structure.
av_cold void ff_cbs_close(CodedBitstreamContext **ctx_ptr)
Close a context and free all internal state.
static uint64_t round_two(uint64_t x, uint16_t n)
Content light level needed to transmit HDR over HDMI (CTA-861.3).
uint8_t chroma_scaling_from_luma
uint8_t ar_coeffs_cr_plus_128[25]
AVCodec p
The public AVCodec.
static const AVOption av1_options[]
static int av1_frame_ref(AVCodecContext *avctx, AV1Frame *dst, const AV1Frame *src)
enum AVDiscard skip_frame
Skip decoding for selected frames.
int av_fifo_write(AVFifo *f, const void *buf, size_t nb_elems)
Write data into a FIFO.
int grain_scale_shift
Signals the down shift applied to the generated gaussian numbers during synthesis.
uint8_t point_cr_scaling[10]
static uint32_t inverse_recenter(int r, uint32_t v)
#define HWACCEL_VDPAU(codec)
uint8_t ar_coeff_shift_minus_6
uint8_t * data
Pointer to the directly-parsable bitstream form of this unit.
static int av1_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt)
int limit_output_range
Signals to clip to limited color levels after film grain application.
int num_y_points
Number of points, and the scale and value for each point of the piecewise linear scaling function for...
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
static int64_t round_two_signed(int64_t x, uint16_t n)
This structure describes how to handle film grain synthesis for AOM codecs.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
#define AV_PIX_FMT_YUV444P10
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
@ AV1_METADATA_TYPE_HDR_CLL
int av_fifo_read(AVFifo *f, void *buf, size_t nb_elems)
Read data from a FIFO.
uint8_t grain_scaling_minus_8
static int set_output_frame(AVCodecContext *avctx, AVFrame *frame, const AVPacket *pkt, int *got_frame)
static void global_motion_params(AV1DecContext *s)
update gm type/params; since cbs already implements part of this function, we don't need to full ...
#define FF_CODEC_DECODE_CB(func)
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
AVBufferRef * hwaccel_priv_buf
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
static enum AVPixelFormat pix_fmt
int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
AVFilmGrainParams * av_film_grain_params_create_side_data(AVFrame *frame)
Allocate a complete AVFilmGrainParams and add it to the frame.
AV1RawFilmGrainParams film_grain
static av_cold int av1_decode_free(AVCodecContext *avctx)
int(* init)(AVBSFContext *ctx)
static enum AVPixelFormat pix_fmts[]
static int init_tile_data(AV1DecContext *s)
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
static int update_reference_list(AVCodecContext *avctx)
const AVProfile ff_av1_profiles[]
static void av1_frame_unref(AVCodecContext *avctx, AV1Frame *f)
uint8_t transfer_characteristics
int(* decode_params)(AVCodecContext *avctx, int type, const uint8_t *buf, uint32_t buf_size)
Callback for parameter data (SPS/PPS/VPS etc).
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
static int get_tiles_info(AVCodecContext *avctx, const AV1RawTileGroup *tile_group)
static int export_itut_t35(AVCodecContext *avctx, AVFrame *frame, const AV1RawMetadataITUTT35 *itut_t35)
union AVFilmGrainParams::@320 codec
Additional fields may be added both here and in any structure included.
#define CODEC_LONG_NAME(str)
#define FF_CODEC_PROPERTY_FILM_GRAIN
static void load_grain_params(AV1DecContext *s)
#define AV_PIX_FMT_GRAY10
const FFCodec ff_av1_decoder
@ AV1_WARP_MODEL_TRANSLATION
@ AVDISCARD_ALL
discard all
int ff_parse_a53_cc(AVBufferRef **pbuf, const uint8_t *data, int size)
Parse a data array for ATSC A53 Part 4 Closed Captions and store them in an AVBufferRef.
#define LIBAVUTIL_VERSION_INT
Describe the class of an AVClass context structure.
int32_t gm_params[AV1_NUM_REF_FRAMES][6]
enum AVColorRange color_range
MPEG vs JPEG YUV range.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Rational number (pair of numerator and denominator).
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
const char * av_default_item_name(void *ptr)
Return the context name.
int(* end_frame)(AVCodecContext *avctx)
Called at the end of each frame or field picture.
@ AV_PICTURE_TYPE_I
Intra.
uint8_t chroma_sample_position
size_t data_size
The number of bytes in the bitstream (including any padding bits in the final byte).
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
AVFrameSideData * av_frame_new_side_data_from_buf(AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef *buf)
Add a new side data to a frame from an existing AVBufferRef.
static int update_context_with_frame_header(AVCodecContext *avctx, const AV1RawFrameHeader *header)
#define AV_PIX_FMT_YUV422P10
static void skip_mode_params(AV1DecContext *s)
@ AV_PIX_FMT_GRAY8
Y, 8bpp.
@ AV_PICTURE_TYPE_SP
Switching Predicted.
int av_opt_set_int(void *obj, const char *name, int64_t val, int search_flags)
static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g)
static av_always_inline int bytestream2_tell(GetByteContext *g)
static int16_t resolve_divisor(uint32_t d, uint16_t *shift)
Resolve divisor process.
int num_uv_points[2]
If chroma_scaling_from_luma is set to 0, signals the chroma scaling function parameters.
uint8_t point_cr_value[10]
@ AVDISCARD_NONKEY
discard all frames except keyframes
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
static uint32_t decode_unsigned_subexp_with_ref(uint32_t sub_exp, int mx, int r)
uint32_t num_ticks_per_picture_minus_1
static int shift(int a, int b)
#define AV_PIX_FMT_YUV422P12
AV1RawSequenceHeader sequence_header
static AVRational av_make_q(int num, int den)
Create an AVRational.
uint8_t skip_mode_frame_idx[2]
#define MKBETAG(a, b, c, d)
#define AV_PIX_FMT_YUV444P12
This structure describes how to handle film grain synthesis in video for specific codecs.
@ AV1_OBU_SEQUENCE_HEADER
static const uint8_t header[24]
static av_always_inline int diff(const struct color_info *a, const struct color_info *b, const int trans_thresh)
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
int av_reallocp_array(void *ptr, size_t nmemb, size_t size)
Allocate, reallocate an array through a pointer to a pointer.
#define HWACCEL_D3D11VA(codec)
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
uint8_t grain_scale_shift
#define HWACCEL_NVDEC(codec)
@ AV1_WARP_MODEL_IDENTITY
int8_t ar_coeffs_y[24]
Luma auto-regression coefficients.
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
AV1RawFrameHeader frame_header
int ff_cbs_read_extradata_from_codec(CodedBitstreamContext *ctx, CodedBitstreamFragment *frag, const AVCodecContext *avctx)
void ff_thread_release_buffer(AVCodecContext *avctx, AVFrame *f)
Wrapper around release_buffer() for frame-multithreaded codecs.
static void av1_decode_flush(AVCodecContext *avctx)
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
static int export_film_grain(AVCodecContext *avctx, AVFrame *frame)
@ AVDISCARD_NONINTRA
discard all non intra frames
#define i(width, name, range_min, range_max)
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
unsigned properties
Properties of the stream that gets decoded.
static av_always_inline av_const double round(double x)
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
int(* decode_slice)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size)
Callback for each slice.
static const uint16_t div_lut[AV1_DIV_LUT_NUM]
Same as the Div_Lut table defined in spec section 7.11.3.7.
void * hwaccel_picture_private
uint8_t equal_picture_interval
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
int scaling_shift
Specifies the shift applied to the chroma components.
const char * name
Name of the codec implementation.
int av_buffer_replace(AVBufferRef **pdst, const AVBufferRef *src)
Ensure dst refers to the same data as src.
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
#define FF_CODEC_CAP_SETS_PKT_DTS
Decoders marked with FF_CODEC_CAP_SETS_PKT_DTS want to set AVFrame.pkt_dts manually.
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
AV1RawTileGroup tile_group
This struct represents dynamic metadata for color volume transform - application 4 of SMPTE 2094-40:2...
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
static const AVClass av1_class
AVBufferRef * content_ref
If content is reference counted, a reference to the buffer containing content.
AVBufferRef * av_buffer_allocz(size_t size)
Same as av_buffer_alloc(), except the returned buffer will be initialized to zero.
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
uint8_t gm_invalid[AV1_NUM_REF_FRAMES]
uint8_t film_grain_params_ref_idx
static void read_global_param(AV1DecContext *s, int type, int ref, int idx)
AVFifo * av_fifo_alloc2(size_t nb_elems, size_t elem_size, unsigned int flags)
Allocate and initialize an AVFifo with a given element size.
#define AV_PIX_FMT_YUV420P12
@ AV1_WARPEDMODEL_PREC_BITS
main external API structure.
static uint8_t get_shear_params_valid(AV1DecContext *s, int idx)
Check whether the global motion params are valid.
int ar_coeff_lag
Specifies the auto-regression lag.
uint32_t CodedBitstreamUnitType
The codec-specific type of a bitstream unit.
int uv_offset[2]
Offset used for component scaling function.
int frame_priv_data_size
Size of per-frame hardware accelerator private data.
@ AV1_MAX_OPERATING_POINTS
static int export_metadata(AVCodecContext *avctx, AVFrame *frame)
int uv_mult[2]
Specifies the luma/chroma multipliers for the index to the component scaling function.
static int ref[MAX_W *MAX_W]
int export_side_data
Bit set of AV_CODEC_EXPORT_DATA_* flags, which affects the kind of metadata exported in frame,...
uint8_t ar_coeffs_cb_plus_128[25]
uint8_t ar_coeffs_y_plus_128[24]
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
int ff_cbs_read_packet(CodedBitstreamContext *ctx, CodedBitstreamFragment *frag, const AVPacket *pkt)
Read the data bitstream from a packet into a fragment, then split into units and decompose.
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
uint8_t point_y_scaling[14]
int(* start_frame)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size)
Called at the beginning of each frame or field picture.
#define FF_DISABLE_DEPRECATION_WARNINGS
int overlap_flag
Signals whether to overlap film grain blocks.
@ AV_PICTURE_TYPE_P
Predicted.
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
A reference to a data buffer.
uint8_t point_cb_scaling[10]
AV1RawTileGroup tile_group
static int32_t decode_signed_subexp_with_ref(uint32_t sub_exp, int low, int high, int r)
av_cold int ff_cbs_init(CodedBitstreamContext **ctx_ptr, enum AVCodecID codec_id, void *log_ctx)
Create and initialise a new context for the given codec.
static int get_pixel_format(AVCodecContext *avctx)
static const int16_t alpha[]
This structure stores compressed data.
unsigned MaxFALL
Max average light level per frame (cd/m^2).
#define HWACCEL_VAAPI(codec)
int width
picture width / height.
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
static int FUNC() sequence_header(CodedBitstreamContext *ctx, RWContext *rw, MPEG2RawSequenceHeader *current)
void av_fifo_freep2(AVFifo **f)
Free an AVFifo and reset pointer to NULL.
#define AV_CODEC_CAP_AVOID_PROBING
Decoder is not a preferred choice for probing.
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
#define AV_PIX_FMT_GRAY12
int chroma_scaling_from_luma
Signals whether to derive the chroma scaling function from the luma.
av_cold void ff_cbs_flush(CodedBitstreamContext *ctx)
Reset all internal state in a context.
uint8_t gm_type[AV1_NUM_REF_FRAMES]
@ AV_FILM_GRAIN_PARAMS_AV1
The union is valid when interpreted as AVFilmGrainAOMParams (codec.aom)
enum AVFilmGrainParamsType type
Specifies the codec for which this structure is valid.
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
#define AV_CODEC_EXPORT_DATA_FILM_GRAIN
Decoding only.
#define AV_FIFO_FLAG_AUTO_GROW
Automatically resize the FIFO on writes, so that the data fits.
int ar_coeff_shift
Specifies the range of the auto-regressive coefficients.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
int8_t ar_coeffs_uv[2][25]
Chroma auto-regression coefficients.