#define X265_API_IMPORTS 1

/* Maximum number of coded layers the wrapper handles: the base layer plus an
 * optional alpha layer on new enough libx265 builds. */
#if defined(X265_ENABLE_ALPHA) && MAX_LAYERS > 2
#define FF_X265_MAX_LAYERS MAX_LAYERS
#elif X265_BUILD >= 210
#define FF_X265_MAX_LAYERS 2
#else
#define FF_X265_MAX_LAYERS 1
#endif
/* is_keyframe(): NAL unit types whose slices start a keyframe */
    case NAL_UNIT_CODED_SLICE_BLA_W_LP:
    case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
    case NAL_UNIT_CODED_SLICE_BLA_N_LP:
    case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
    case NAL_UNIT_CODED_SLICE_IDR_N_LP:
    case NAL_UNIT_CODED_SLICE_CRA:
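The listing omits the switch scaffolding around these labels; a minimal sketch of how is_keyframe() is plausibly completed, assuming only the NalUnitType enum that <x265.h> provides:

/* Sketch: returns 1 for IRAP NAL types (IDR/BLA/CRA), 0 otherwise. */
static int is_keyframe(NalUnitType naltype)
{
    switch (naltype) {
    case NAL_UNIT_CODED_SLICE_BLA_W_LP:
    case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
    case NAL_UNIT_CODED_SLICE_BLA_N_LP:
    case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
    case NAL_UNIT_CODED_SLICE_IDR_N_LP:
    case NAL_UNIT_CODED_SLICE_CRA:
        return 1;
    default:
        return 0;
    }
}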
/* rd_get(): find a free slot in the reordered-data array, growing it when
 * every slot is taken */
    for (int i = 0; i < ctx->nb_rd; i++)
        if (!ctx->rd[i].in_use) {
            ctx->rd[i].in_use = 1;
            /* ... */
        }
    /* ... the array is enlarged by `add` entries; only the new tail is zeroed ... */
    memset(tmp + ctx->nb_rd, 0, sizeof(*tmp) * add);
    /* ... */
    idx = ctx->nb_rd - add;
    ctx->rd[idx].in_use = 1;

/* rd_release(): wipe the slot so it can be handed out again */
    memset(&ctx->rd[idx], 0, sizeof(ctx->rd[idx]));
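A self-contained sketch of the grow-and-mark pattern these fragments implement, using av_realloc_array() from the glossary below; the Slot/slot_get/slot_release names and the batch size are illustrative, not the wrapper's own:

#include <string.h>
#include <libavutil/mem.h>

typedef struct Slot { int in_use; /* per-frame payload would live here */ } Slot;

/* Return the index of a free slot, growing the array when none is free. */
static int slot_get(Slot **slots, int *nb_slots)
{
    const int add = 16;                  /* grow in batches */
    Slot *tmp;

    for (int i = 0; i < *nb_slots; i++)
        if (!(*slots)[i].in_use) {
            (*slots)[i].in_use = 1;
            return i;
        }

    tmp = av_realloc_array(*slots, *nb_slots + add, sizeof(*tmp));
    if (!tmp)
        return -1;
    memset(tmp + *nb_slots, 0, sizeof(*tmp) * add);  /* zero only the new tail */

    *slots     = tmp;
    *nb_slots += add;
    (*slots)[*nb_slots - add].in_use = 1;
    return *nb_slots - add;
}

/* Wipe a slot so it can be handed out again. */
static void slot_release(Slot *slots, int idx)
{
    memset(&slots[idx], 0, sizeof(slots[idx]));
}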
/* libx265_encode_close(): free the parameter set, release any reordered
 * per-frame data still held, and close the encoder */
    ctx->api->param_free(ctx->params);
    /* ... */
    for (int i = 0; i < ctx->nb_rd; i++)
        /* ... */
    ctx->api->encoder_close(ctx->encoder);
/* libx265_param_parse_int() / libx265_param_parse_float(): the value is
 * printed into buf and then validated by libx265 itself; both helpers share
 * the same check */
    if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE) {
        /* ... */
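Both helpers follow the same pattern: print the value into a small buffer and let libx265's own parser validate it. A hedged, self-contained sketch of the integer variant (the X265Ctx stand-in and buffer size are assumptions for illustration):

#include <stdio.h>
#include <x265.h>

/* Minimal stand-in for the libx265Context fields the helper touches. */
typedef struct {
    const x265_api *api;
    x265_param     *params;
} X265Ctx;

/* Format an integer option as text and let libx265 validate it. */
static int parse_int_option(X265Ctx *ctx, const char *key, int value)
{
    char buf[16];

    snprintf(buf, sizeof(buf), "%d", value);
    if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE)
        return -1;   /* the real helper logs the key and value and fails */
    return 0;
}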
/* handle_mdcv(): serialize mastering-display metadata into the string form
 * libx265 expects for its "master-display" parameter */
    char buf[10 * 20 + sizeof("G(,)B(,)R(,)WP(,)L(,)")];
    /* ... */
             "G(%"PRId64",%"PRId64")B(%"PRId64",%"PRId64")R(%"PRId64",%"PRId64")"
             "WP(%"PRId64",%"PRId64")L(%"PRId64",%"PRId64")",
    /* ... */
    if (api->param_parse(params, "master-display", buf) ==
        X265_PARAM_BAD_VALUE) {
        av_log(logctx, AV_LOG_ERROR,
               "Invalid value \"%s\" for param \"master-display\".\n",
               buf);

/* handle_side_data(): forward content light level metadata to x265 */
    params->maxCLL  = cll->MaxCLL;
    params->maxFALL = cll->MaxFALL;
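The master-display buffer above is sized for ten 20-digit integers plus the literal scaffolding of the format string. A hedged sketch of assembling such a string from already-scaled integers (the function and parameter names are illustrative; the real code derives the values from AVMasteringDisplayMetadata):

#include <inttypes.h>
#include <stdio.h>

/* Build x265's "master-display" string from pre-scaled chromaticities for the
 * G/B/R primaries and white point, plus max/min mastering luminance. */
static void format_master_display(char *buf, size_t buf_size,
                                  int64_t gx, int64_t gy, int64_t bx, int64_t by,
                                  int64_t rx, int64_t ry, int64_t wx, int64_t wy,
                                  int64_t max_lum, int64_t min_lum)
{
    snprintf(buf, buf_size,
             "G(%"PRId64",%"PRId64")B(%"PRId64",%"PRId64")R(%"PRId64",%"PRId64")"
             "WP(%"PRId64",%"PRId64")L(%"PRId64",%"PRId64")",
             gx, gy, bx, by, rx, ry, wx, wy, max_lum, min_lum);
}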
/* libx265_encode_init(): pick the x265 API matching the requested bit depth,
 * falling back to the default build, then apply preset and tune */
    ctx->api = x265_api_get(desc->comp[0].depth);
    if (!ctx->api)
        ctx->api = x265_api_get(0);

    ctx->params = ctx->api->param_alloc();
    /* ... */
    if (ctx->api->param_default_preset(ctx->params, ctx->preset, ctx->tune) < 0) {
        /* ... on failure, list the valid names ... */
        for (i = 0; x265_preset_names[i]; i++)
            /* ... */
        for (i = 0; x265_tune_names[i]; i++)
            /* ... */
#if FF_API_TICKS_PER_FRAME
    /* ... legacy avctx->ticks_per_frame handling ... */
#endif

    ctx->params->sourceWidth  = avctx->width;
    ctx->params->sourceHeight = avctx->height;

    /* shrink the maximum CU size for small frames and reject anything below
     * 16x16 */
    if (ctx->params->sourceWidth < 64 || ctx->params->sourceHeight < 64)
        ctx->params->maxCUSize = 32;
    if (ctx->params->sourceWidth < 32 || ctx->params->sourceHeight < 32)
        ctx->params->maxCUSize = 16;
    if (ctx->params->sourceWidth < 16 || ctx->params->sourceHeight < 16) {
        av_log(avctx, AV_LOG_ERROR, "Image size is too small (%dx%d).\n",
               ctx->params->sourceWidth, ctx->params->sourceHeight);
    /* VUI: signal type, color description and chroma sample location */
    ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1;
    /* the full-range flag is set on one of two paths, depending on whether a
     * deprecated YUVJ pixel format or avctx->color_range conveys the range */
    ctx->params->vui.bEnableVideoFullRangeFlag = /* ... */;
    /* ... */
    ctx->params->vui.bEnableVideoFullRangeFlag = /* ... */;
    /* ... */
    ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
    /* ... */
    ctx->params->vui.transferCharacteristics = avctx->color_trc;
#if X265_BUILD >= 159
    /* ... */
    ctx->params->preferredTransferCharacteristics =
        ctx->params->vui.transferCharacteristics;
#endif

    /* chroma sample location is only signalled for 4:2:0 content */
    ctx->params->vui.bEnableChromaLocInfoPresentFlag =
        avctx->chroma_sample_location != AVCHROMA_LOC_UNSPECIFIED &&
        desc->log2_chroma_w == 1 && desc->log2_chroma_h == 1;

    if (ctx->params->vui.bEnableChromaLocInfoPresentFlag) {
        ctx->params->vui.chromaSampleLocTypeTopField =
        ctx->params->vui.chromaSampleLocTypeBottomField =
            /* ... derived from avctx->chroma_sample_location ... */
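The fragment that follows builds the sample aspect ratio string for x265; a short sketch of that step with av_reduce() is given here (the 65535 clamp is an assumption matching the 16-bit SAR fields signalled in the bitstream):

#include <stdio.h>
#include <libavutil/rational.h>

/* Reduce a sample aspect ratio and render it as the "num:den" string that is
 * handed to param_parse(..., "sar", ...). */
static void format_sar(AVRational sample_aspect_ratio, char *out, size_t out_size)
{
    int sar_num, sar_den;

    av_reduce(&sar_num, &sar_den,
              sample_aspect_ratio.num, sample_aspect_ratio.den, 65535);
    snprintf(out, out_size, "%d:%d", sar_num, sar_den);
}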
    /* reduce the sample aspect ratio and hand it to x265 as a "num:den"
     * string (see the sketch above) */
    int sar_num, sar_den;
    /* ... */
    snprintf(sar, sizeof(sar), "%d:%d", sar_num, sar_den);
    if (ctx->api->param_parse(ctx->params, "sar", sar) == X265_PARAM_BAD_VALUE) {
    /* map the pixel format onto an x265 color space */
    switch (desc->log2_chroma_w) {
    case 0: /* 4:4:4, RGB or gray */
        if (desc->nb_components == 1) {
            if (ctx->api->api_build_number < 85) {
                av_log(avctx, AV_LOG_ERROR,
                       "libx265 version is %d, must be at least 85 for gray encoding.\n",
                       ctx->api->api_build_number);
                /* ... */
            }
            ctx->params->internalCsp = X265_CSP_I400;
            /* ... */
        }
        /* ... RGB input additionally gets an explicit color description ... */
        ctx->params->vui.bEnableVideoSignalTypePresentFlag  = 1;
        ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
        /* ... */
        ctx->params->internalCsp = X265_CSP_I444;
        /* ... */
    case 1: /* 4:2:0 or 4:2:2 */
        ctx->params->internalCsp = desc->log2_chroma_h == 1 ?
                                   X265_CSP_I420 : X265_CSP_I422;
        /* ... */
    default:
        av_log(avctx, AV_LOG_ERROR,
               "Pixel format '%s' cannot be mapped to a libx265 CSP!\n",
               /* ... */
    /* rate control: CRF when requested, ABR when a bitrate is set, CQP
     * otherwise */
        if (ctx->api->param_parse(ctx->params, "crf", crf) == X265_PARAM_BAD_VALUE) {
            /* ... */
    /* ... */
        ctx->params->rc.rateControlMode = X265_RC_ABR;
    } else if (ctx->cqp >= 0) {
        /* ... */

    if (avctx->qmin >= 0) {
        /* ... */
    if (avctx->qmax >= 0) {
        /* ... */
    if (avctx->qblur >= 0) {
        /* ... */

    /* repeat the parameter sets in-band unless global headers were requested */
    ctx->params->bRepeatHeaders = 1;
    /* ... */
    if (avctx->refs >= 0) {
    /* apply user overrides from the x265-params dictionary */
        if (!strncmp(en->key, "alpha", 5)) {
            if (desc->nb_components == 4) {
                av_log(avctx, AV_LOG_WARNING,
                       "Ignoring redundant \"alpha\" option.\n");
                /* ... */
            } else {
                av_log(avctx, AV_LOG_ERROR,
                       "Alpha encoding was requested through an unsupported "
                       "option when no alpha plane is present\n");
                /* ... */
            }
        }
        parse_ret = ctx->api->param_parse(ctx->params, en->key, en->value);
        switch (parse_ret) {
        case X265_PARAM_BAD_NAME:
            av_log(avctx, AV_LOG_WARNING,
                   "Unknown option: %s.\n", en->key);
            /* ... */
        case X265_PARAM_BAD_VALUE:
            av_log(avctx, AV_LOG_WARNING,
                   "Invalid value for %s: %s.\n", en->key, en->value);
            /* ... */
        }

    /* carry avctx->rc_initial_buffer_occupancy over while the VBV defaults
     * are still in place */
    if (/* ... */
        ctx->params->rc.vbvBufferInit == 0.9) {
        /* ... */
    }

    if (ctx->api->param_apply_profile(ctx->params, ctx->profile) < 0) {
        /* ... on failure, list the valid names ... */
        for (i = 0; x265_profile_names[i]; i++)
            /* ... */
#if X265_BUILD >= 167
    /* Dolby Vision configuration (see ff_dovi_configure() in the glossary) */
    ctx->dovi.logctx = avctx;
    /* ... */
    ctx->params->dolbyProfile = ctx->dovi.cfg.dv_profile * 10 +
                                ctx->dovi.cfg.dv_bl_signal_compatibility_id;
#endif

#if X265_BUILD >= 210 && FF_X265_MAX_LAYERS > 1
    /* ask libx265 for an alpha layer when the input carries an alpha plane */
    if (ctx->api->param_parse(ctx->params, "alpha", "1") < 0) {
        av_log(avctx, AV_LOG_ERROR,
               "Loaded libx265 does not support alpha layer encoding.\n");
        /* ... */
    }
#endif

    ctx->encoder = ctx->api->encoder_open(ctx->params);
    /* ... */
    /* with AV_CODEC_FLAG_GLOBAL_HEADER set, the encoder headers are copied
     * into avctx->extradata */
        av_log(avctx, AV_LOG_ERROR,
               "Cannot allocate HEVC header of size %d.\n", avctx->extradata_size);
/* libx265_encode_set_roi(): convert AVRegionOfInterest side data into the
 * per-block quantizer offset array x265 consumes */
    if (ctx->params->rc.aqMode == X265_AQ_NONE) {
        if (!ctx->roi_warned) {
            /* ... warn once that ROI needs adaptive quantization enabled ... */
        }
    } else {
        int mb_size = (ctx->params->rc.qgSize == 8) ? 8 : 16;
        int mbx = (frame->width  + mb_size - 1) / mb_size;
        int mby = (frame->height + mb_size - 1) / mb_size;
        int qp_range = 51 + 6 * (pic->bitDepth - 8);
        /* ... */
        if (!roi_size || sd->size % roi_size != 0) {
            /* ... malformed AVRegionOfInterest array ... */
        }
        nb_rois = sd->size / roi_size;

        qoffsets = av_calloc(mbx * mby, sizeof(*qoffsets));
        /* ... */

        /* earlier regions take precedence, so apply them last by iterating in
         * reverse */
        for (int i = nb_rois - 1; i >= 0; i--) {
            int startx, endx, starty, endy;
            /* ... */
            starty = FFMIN(mby, roi->top / mb_size);
            endy   = FFMIN(mby, (roi->bottom + mb_size - 1) / mb_size);
            startx = FFMIN(mbx, roi->left / mb_size);
            endx   = FFMIN(mbx, (roi->right + mb_size - 1) / mb_size);
            /* ... */
            qoffset = av_clipf(qoffset * qp_range, -qp_range, +qp_range);

            for (int y = starty; y < endy; y++)
                for (int x = startx; x < endx; x++)
                    qoffsets[x + y*mbx] = qoffset;
        }

        pic->quantOffsets = qoffsets;
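For context, the side data consumed above is attached by the caller. A hedged sketch of the producer side, adding one region that asks for better quality over a rectangle (the add_roi helper is illustrative, not part of the wrapper):

#include <libavutil/error.h>
#include <libavutil/frame.h>

/* Attach a single AVRegionOfInterest to a frame; the encoder wrapper above
 * turns it into per-block quantizer offsets. */
static int add_roi(AVFrame *frame, int top, int bottom, int left, int right)
{
    AVFrameSideData *sd = av_frame_new_side_data(frame,
                                                 AV_FRAME_DATA_REGIONS_OF_INTEREST,
                                                 sizeof(AVRegionOfInterest));
    AVRegionOfInterest *roi;

    if (!sd)
        return AVERROR(ENOMEM);

    roi = (AVRegionOfInterest *)sd->data;
    roi->self_size = sizeof(*roi);            /* mandatory, see the glossary below */
    roi->top       = top;
    roi->bottom    = bottom;
    roi->left      = left;
    roi->right     = right;
    roi->qoffset   = (AVRational){ -1, 10 };  /* negative offset = higher quality */

    return 0;
}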
/* free_picture(): release the per-picture SEI payloads, any Dolby Vision RPU
 * and the reordered-data slot attached to an x265_picture */
    x265_sei *sei = &pic->userSEI;
    for (int i = 0; i < sei->numPayloads; i++)
        /* ... free each payload ... */

#if X265_BUILD >= 167
    /* ... */
#endif

    if (pic->userData) {
        int idx = (int)(intptr_t)pic->userData - 1;
        rd_release(ctx, idx);
        pic->userData = NULL;
    }

    sei->numPayloads = 0;
static int libx265_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                const AVFrame *pic, int *got_packet)
{
    /* ... */
    x265_picture x265pic;
#if (X265_BUILD >= 210) && (X265_BUILD < 213)
    /* ... per-layer output picture array ... */
#endif
    /* ... */
    ctx->api->picture_init(ctx->params, &x265pic);

    sei = &x265pic.userSEI;
    sei->numPayloads = 0;

    if (pic) {
        /* ... */
        for (i = 0; i < desc->nb_components; i++) {
            x265pic.planes[i] = pic->data[i];
            /* ... the matching linesize is forwarded alongside each plane ... */
        }

        x265pic.pts = pic->pts;
        /* ... */
        x265pic.sliceType = pic->pict_type == AV_PICTURE_TYPE_I ?
                            (ctx->forced_idr ? X265_TYPE_IDR : X265_TYPE_I) :
                            /* ... */;
        /* ... */
        rd = &ctx->rd[rd_idx];
        /* ... */
        x265pic.userData = (void*)(intptr_t)(rd_idx + 1);
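The userData assignment above stores a small array index inside a pointer field; the +1 bias keeps index 0 distinguishable from NULL. A tiny illustration of that round trip (names are illustrative):

#include <stdint.h>

static void *index_to_userdata(int idx)
{
    return (void *)(intptr_t)(idx + 1);   /* 0 would look like NULL */
}

static int userdata_to_index(void *userdata)
{
    return (int)(intptr_t)userdata - 1;   /* yields -1 for NULL */
}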
    /* A53 closed captions become a registered ITU-T T.35 SEI payload */
    } else if (sei_data) {
        x265_sei_payload *sei_payload;

        tmp = av_fast_realloc(ctx->sei_data, &ctx->sei_data_size,
                              (sei->numPayloads + 1) * sizeof(*sei_payload));
        /* ... */
        sei->payloads = ctx->sei_data;
        sei_payload = &sei->payloads[sei->numPayloads];
        sei_payload->payload     = sei_data;
        sei_payload->payloadSize = sei_size;
        /* ... */
    }

    /* AV_FRAME_DATA_SEI_UNREGISTERED side data becomes an unregistered user
     * data SEI payload */
        x265_sei_payload *sei_payload;
        /* ... */
        tmp = av_fast_realloc(ctx->sei_data, &ctx->sei_data_size,
                              (sei->numPayloads + 1) * sizeof(*sei_payload));
        /* ... */
        sei->payloads = ctx->sei_data;
        sei_payload = &sei->payloads[sei->numPayloads];
        /* ... the payload bytes are duplicated with av_memdup() ... */
        if (!sei_payload->payload) {
            /* ... */
        }
        sei_payload->payloadSize = side_data->size;
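Both SEI paths grow the payload array with av_fast_realloc(), which reallocates only when the requested size exceeds the tracked allocation. A hedged sketch of that pattern outside the wrapper (struct and function names are illustrative):

#include <libavutil/error.h>
#include <libavutil/mem.h>

typedef struct {
    void        *payloads;       /* grows on demand */
    unsigned int payloads_size;  /* allocated size in bytes, tracked for av_fast_realloc() */
} PayloadBuf;

/* Make room for `count` elements of `elem_size` bytes, keeping existing contents. */
static int ensure_payloads(PayloadBuf *buf, int count, size_t elem_size)
{
    void *tmp = av_fast_realloc(buf->payloads, &buf->payloads_size,
                                count * elem_size);
    if (!tmp)
        return AVERROR(ENOMEM);
    buf->payloads = tmp;
    return 0;
}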
#if X265_BUILD >= 167
    /* Dolby Vision: synthesize an RPU for this frame's metadata */
    if (ctx->dovi.cfg.dv_profile && sd) {
        const AVDOVIMetadata *metadata = (const AVDOVIMetadata *)sd->data;
        ret = ff_dovi_rpu_generate(&ctx->dovi, metadata, FF_DOVI_WRAP_NAL,
                                   &x265pic.rpu.payload,
                                   &x265pic.rpu.payloadSize);
        /* ... */
    } else if (ctx->dovi.cfg.dv_profile) {
        av_log(avctx, AV_LOG_ERROR, /* ... */
               "without AV_FRAME_DATA_DOVI_METADATA");
#if (X265_BUILD >= 210) && (X265_BUILD < 213)
    /* these builds hand back one output picture per layer */
    for (i = 0; i < FF_ARRAY_ELEMS(x265pic_out); i++)
        x265pic_lyrptr_out[i] = &x265pic_out[i];

    ret = ctx->api->encoder_encode(ctx->encoder, &nal, &nnal,
                                   pic ? &x265pic : NULL, x265pic_lyrptr_out);
#else
    ret = ctx->api->encoder_encode(ctx->encoder, &nal, &nnal,
                                   pic ? &x265pic : NULL, x265pic_out);
#endif

    /* the SEI payloads queued above are no longer needed once the picture has
     * been submitted */
    for (i = 0; i < sei->numPayloads; i++)
        /* ... */
    /* concatenate every returned NAL unit into a single output packet */
    for (i = 0; i < nnal; i++)
        payload += nal[i].sizeBytes;
    /* ... the packet buffer is obtained with ff_get_encode_buffer() ... */
    for (i = 0; i < nnal; i++) {
        memcpy(dst, nal[i].payload, nal[i].sizeBytes);
        dst += nal[i].sizeBytes;
        /* ... */
    }

    pkt->pts = x265pic_out->pts;
    pkt->dts = x265pic_out->dts;
    /* map the x265 slice type back to an AVPictureType and flag B-frames as
     * disposable */
    switch (x265pic_out->sliceType) {
    /* ... */
    }

#if X265_BUILD >= 130
    if (x265pic_out->sliceType == X265_TYPE_B)
#else
    if (x265pic_out->frameData.sliceType == 'b')
#endif
        /* ... pkt->flags |= AV_PKT_FLAG_DISPOSABLE ... */

    if (x265pic_out->userData) {
        int idx = (int)(intptr_t)x265pic_out->userData - 1;
        /* ... copy the reordered per-frame data kept in ctx->rd[idx] onto the
         * packet and release the slot ... */
/* the alpha pixel formats in the x265_csp_* tables are each guarded by: */
#if X265_BUILD >= 210 && FF_X265_MAX_LAYERS > 1
    /* ... the YUVA pixel formats (AV_PIX_FMT_YUVA420P etc.) ... */
#endif

/* libx265_get_supported_config(): advertise only the bit depths provided by
 * the libx265 build that was loaded */
    if (x265_api_get(12)) {
        /* ... */
    } else if (x265_api_get(10)) {
        /* ... */
    } else if (x265_api_get(8)) {
#define OFFSET(x) offsetof(libx265Context, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM

static const AVOption options[] = {
    /* ... */
    { "forced-idr", "if forcing keyframes, force them as IDR frames",
      OFFSET(forced_idr), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE },
    /* ... */
    { "udu_sei", "Use user data unregistered SEI if available",
      OFFSET(udu_sei), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE },
    { "x265-params",
      "set the x265 configuration using a :-separated list of key=value parameters",
      OFFSET(x265_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE },
#if X265_BUILD >= 167
    /* ... Dolby Vision option ... */
#endif
    /* ... */
};

static const AVClass class = {
    /* ... */
};

static const FFCodecDefault x265_defaults[] = {
    /* ... */
    { "keyint_min", "-1" },
    /* ... */
    { "i_qfactor", "-1" },
    { "b_qfactor", "-1" },
    /* ... */
};

FFCodec ff_libx265_encoder = {
    .p.name         = "libx265",
    /* ... */
    .p.priv_class   = &class,
    .p.wrapper_name = "libx265",
    /* ... */
};
#define FF_ENABLE_DEPRECATION_WARNINGS
int ff_alloc_a53_sei(const AVFrame *frame, size_t prefix_len, void **data, size_t *sei_size)
Check AVFrame for A53 side data and allocate and fill SEI message with A53 info.
#define AV_LOG_WARNING
Something somehow does not look correct.
AVPixelFormat
Pixel format.
int keyint_min
minimum GOP size
void ff_dovi_ctx_unref(DOVIContext *s)
Completely reset a DOVIContext, preserving only logctx.
static av_cold int libx265_param_parse_int(AVCodecContext *avctx, const char *key, int value)
static av_cold int libx265_param_parse_float(AVCodecContext *avctx, const char *key, float value)
enum AVColorSpace colorspace
YUV colorspace type.
AVFrameSideData ** decoded_side_data
Array containing static side data, such as HDR10 CLL / MDCV structures.
static const FFCodecDefault defaults[]
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
int64_t duration
Duration of the frame, in the same units as pts.
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
static const AVOption options[]
@ AV_FRAME_DATA_DOVI_METADATA
Parsed Dolby Vision metadata, suitable for passing to a software implementation.
void * opaque
Frame owner's private data.
unsigned MaxCLL
Max content light level (cd/m^2).
This structure describes decoded (raw) audio or video data.
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
@ AVCOL_RANGE_JPEG
Full range content.
int depth
Number of bits in the component.
#define AV_PIX_FMT_YUVA420P10
static void free_picture(libx265Context *ctx, x265_picture *pic)
#define AV_PIX_FMT_YUV420P10
#define FF_CODEC_CAP_NOT_INIT_THREADSAFE
The codec is not known to be init-threadsafe (i.e.
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
int64_t duration
Duration of this packet in AVStream->time_base units, 0 if unknown.
#define AV_PKT_FLAG_DISPOSABLE
Flag is used to indicate packets that contain frames that can be discarded by the decoder.
#define AV_CODEC_FLAG_PSNR
error[] variables will be set during encoding.
@ AV_CODEC_CONFIG_PIX_FORMAT
AVPixelFormat, terminated by AV_PIX_FMT_NONE.
int qmax
maximum quantizer
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
#define AV_CODEC_FLAG_GLOBAL_HEADER
Place global headers in extradata instead of every keyframe.
int roi_warned
If the encoder does not support ROI then warn the first time we encounter a frame with ROI side data.
void * av_memdup(const void *p, size_t size)
Duplicate a buffer with av_malloc().
AVBufferRef * opaque_ref
Frame owner's private data.
Content light level needed to transmit HDR over HDMI (CTA-861.3).
#define FF_X265_MAX_LAYERS
static int libx265_get_supported_config(const AVCodecContext *avctx, const AVCodec *codec, enum AVCodecConfig config, unsigned flags, const void **out, int *out_num)
static int libx265_encode_frame(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pic, int *got_packet)
#define AV_CODEC_FLAG_COPY_OPAQUE
float i_quant_factor
qscale factor between P- and I-frames. If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset).
AVCodec p
The public AVCodec.
AVBufferRef * opaque_ref
AVBufferRef for free use by the API user.
int thread_count
thread count is used to decide how many independent tasks should be passed to execute()
#define AV_PIX_FMT_GBRP10
int refs
number of reference frames
int flags
AV_CODEC_FLAG_*.
#define FF_CODEC_ENCODE_CB(func)
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
int ff_dovi_configure(DOVIContext *s, AVCodecContext *avctx)
Helper wrapper around ff_dovi_configure_ext which infers the codec parameters from an AVCodecContext.
#define FF_DOVI_AUTOMATIC
Enable tri-state.
#define AV_PIX_FMT_YUV444P10
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
Structure describing a single Region Of Interest.
int rc_initial_buffer_occupancy
Number of bits which should be loaded into the rc buffer before decoding starts.
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range.
void * av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
Reallocate the given buffer if it is not large enough, otherwise do nothing.
@ AV_PIX_FMT_YUVA420P
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
void * av_realloc_array(void *ptr, size_t nmemb, size_t size)
static void rd_release(libx265Context *ctx, int idx)
#define AV_CODEC_CAP_OTHER_THREADS
Codec supports multithreading through a method other than slice- or frame-level multithreading.
#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
This encoder can reorder user opaque values from input AVFrames and return them with corresponding output packets.
#define av_assert0(cond)
assert() equivalent, that is always enabled.
#define AV_PIX_FMT_FLAG_ALPHA
The pixel format has an alpha channel.
@ SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
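A short usage sketch of av_rescale_q(), which converts a count from one time base to another without intermediate overflow (the time bases below are illustrative):

#include <libavutil/mathematics.h>
#include <libavutil/rational.h>

/* Convert a timestamp counted in milliseconds into 90 kHz ticks. */
static int64_t to_90khz(int64_t pts_ms)
{
    return av_rescale_q(pts_ms, (AVRational){ 1, 1000 }, (AVRational){ 1, 90000 });
}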
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
int64_t rc_max_rate
maximum bitrate
void * opaque
for some private data of the user
This structure describes the bitrate properties of an encoded bitstream.
#define CODEC_LONG_NAME(str)
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range.
#define AV_PIX_FMT_GRAY10
int rc_buffer_size
decoder bitstream buffer size
#define LIBAVUTIL_VERSION_INT
Describe the class of an AVClass context structure.
enum AVColorRange color_range
MPEG vs JPEG YUV range.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
int ff_dovi_rpu_generate(DOVIContext *s, const AVDOVIMetadata *metadata, int flags, uint8_t **out_rpu, int *out_size)
Synthesize a Dolby Vision RPU reflecting the current state.
Rational number (pair of numerator and denominator).
float qblur
amount of qscale smoothing over time (0.0-1.0)
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range.
int64_t bit_rate
the average bitrate
static av_cold int libx265_encode_init(AVCodecContext *avctx)
@ AV_OPT_TYPE_DICT
Underlying C type is AVDictionary*.
uint32_t self_size
Must be set to the size of this data structure (that is, sizeof(AVRegionOfInterest)).
const char * av_default_item_name(void *ptr)
Return the context name.
@ AV_PICTURE_TYPE_I
Intra.
@ AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
Mastering display metadata associated with a video frame.
#define AV_PIX_FMT_YUV422P10
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
@ AVCOL_RANGE_UNSPECIFIED
@ AV_FRAME_DATA_SEI_UNREGISTERED
User data unregistered metadata associated with a video frame.
float qcompress
amount of qscale change between easy & hard scenes (0.0-1.0)
AVRational time_base
This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented.
enum AVPictureType pict_type
Picture type of the frame.
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
int gop_size
the number of pictures in a group of pictures, or 0 for intra_only
AVBufferRef * frame_opaque_ref
#define AV_PIX_FMT_FLAG_RGB
The pixel format contains RGB-like data (as opposed to YUV/grayscale).
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stand-alone.
#define AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV444P12
@ AVCHROMA_LOC_UNSPECIFIED
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
#define AVERROR_EXTERNAL
Generic error in an external library.
int flags
A combination of AV_PKT_FLAG values.
int64_t avg_bitrate
Average bitrate of the stream, in bits per second.
#define AV_LOG_INFO
Standard information.
float b_quant_factor
qscale factor between IP and B-frames. If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset).
@ AV_OPT_TYPE_FLOAT
Underlying C type is float.
static int handle_mdcv(void *logctx, const x265_api *api, x265_param *params, const AVMasteringDisplayMetadata *mdcv)
@ AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
Content light level (based on CTA-861.3).
int ff_default_get_supported_config(const AVCodecContext *avctx, const AVCodec *codec, enum AVCodecConfig config, unsigned flags, const void **out_configs, int *out_num_configs)
Default implementation for avcodec_get_supported_config().
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
uint8_t * extradata
Out-of-band global headers that may be used by some codecs.
int top
Distance in pixels from the top edge of the frame to the top and bottom edges, and from the left edge to the left and right edges, of the rectangle defining this region of interest.
#define AV_PIX_FMT_GBRP12
int64_t max_bitrate
Maximum bitrate of the stream, in bits per second.
const char * name
Name of the codec implementation.
int av_buffer_replace(AVBufferRef **pdst, const AVBufferRef *src)
Ensure dst refers to the same data as src.
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
AVFrameSideData ** side_data
static av_cold int libx265_encode_set_roi(libx265Context *ctx, const AVFrame *frame, x265_picture *pic)
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
void * av_calloc(size_t nmemb, size_t size)
#define AV_CODEC_FLAG_CLOSED_GOP
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
int64_t buffer_size
The size of the buffer to which the ratecontrol is applied, in bits.
#define AV_PIX_FMT_YUV420P12
#define AV_INPUT_BUFFER_PADDING_SIZE
static int rd_get(libx265Context *ctx)
int max_qdiff
maximum quantizer difference between frames
main external API structure.
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
@ FF_DOVI_WRAP_NAL
wrap inside NAL RBSP
@ SEI_TYPE_USER_DATA_UNREGISTERED
int qmin
minimum quantizer
@ AV_OPT_TYPE_INT
Underlying C type is int.
enum AVFrameSideDataType type
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
static enum AVPixelFormat x265_csp_ten[]
attribute_deprecated int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and correct output.
static av_cold int libx265_encode_close(AVCodecContext *avctx)
FFCodec ff_libx265_encoder
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
#define FF_DISABLE_DEPRECATION_WARNINGS
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
@ AV_PICTURE_TYPE_P
Predicted.
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 relative to the input.
A reference to a data buffer.
static int handle_side_data(AVCodecContext *avctx, const x265_api *api, x265_param *params)
#define FF_CODEC_CAP_AUTO_THREADS
Codec handles avctx->thread_count == 0 (auto) internally.
Structure to hold side data for an AVFrame.
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image.
This structure stores compressed data.
unsigned MaxFALL
Max average light level per frame (cd/m^2).
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
static const AVFrameSideData * av_frame_side_data_get(AVFrameSideData *const *sd, const int nb_sd, enum AVFrameSideDataType type)
Wrapper around av_frame_side_data_get_c() to workaround the limitation that for any type T the conversion from T * const * to const T * const * is not performed automatically.
int width
picture width / height.
@ AV_FRAME_DATA_REGIONS_OF_INTEREST
Regions Of Interest, the data is an array of AVRegionOfInterest type, the number of array elements is implied by AVFrameSideData.size / AVRegionOfInterest.self_size.
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
int ff_side_data_set_encoder_stats(AVPacket *pkt, int quality, int64_t *error, int error_count, int pict_type)
static const FFCodecDefault x265_defaults[]
AVCPBProperties * ff_encode_add_cpb_side_data(AVCodecContext *avctx)
Add a CPB properties side data to an encoding context.
@ AV_OPT_TYPE_STRING
Underlying C type is a uint8_t* that is either NULL or points to a C string allocated with the av_malloc() family of functions.
#define AV_PIX_FMT_GRAY12
#define FF_QP2LAMBDA
factor to convert from H.263 QP to lambda
AVRational qoffset
Quantisation offset.
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
static enum AVPixelFormat x265_csp_eight[]
static enum AVPixelFormat x265_csp_twelve[]
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown). That is the width of a pixel divided by the height of the pixel.
const AVDictionaryEntry * av_dict_iterate(const AVDictionary *m, const AVDictionaryEntry *prev)
Iterate over a dictionary.
static int is_keyframe(NalUnitType naltype)
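Finally, the AVOption table documented above is what callers reach through the generic options API. A hedged usage sketch of opening this encoder with a preset and extra x265-params; the option values shown are assumptions about a typical invocation, not requirements:

#include <libavcodec/avcodec.h>
#include <libavutil/dict.h>

/* Open the libx265 encoder with options routed through its AVOption table. */
static AVCodecContext *open_libx265(int width, int height)
{
    const AVCodec *codec = avcodec_find_encoder_by_name("libx265");
    AVCodecContext *enc  = codec ? avcodec_alloc_context3(codec) : NULL;
    AVDictionary *opts   = NULL;

    if (!enc)
        return NULL;

    enc->width     = width;
    enc->height    = height;
    enc->pix_fmt   = AV_PIX_FMT_YUV420P;
    enc->time_base = (AVRational){ 1, 25 };

    av_dict_set(&opts, "preset", "medium", 0);                        /* wrapper option */
    av_dict_set(&opts, "x265-params", "keyint=60:min-keyint=60", 0);  /* AV_OPT_TYPE_DICT option above */

    if (avcodec_open2(enc, codec, &opts) < 0)
        avcodec_free_context(&enc);   /* sets enc to NULL on failure */

    av_dict_free(&opts);
    return enc;
}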