FFmpeg
librav1e.c
/*
 * librav1e encoder
 *
 * Copyright (c) 2019 Derek Buitenhuis
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <rav1e.h>

#include "libavutil/internal.h"
#include "libavutil/avassert.h"
#include "libavutil/base64.h"
#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avcodec.h"
#include "bsf.h"
#include "encode.h"
#include "internal.h"

typedef struct librav1eContext {
    const AVClass *class;

    RaContext *ctx;
    AVFrame *frame;
    RaFrame *rframe;
    AVBSFContext *bsf;

    uint8_t *pass_data;
    size_t pass_pos;
    int pass_size;

    AVDictionary *rav1e_opts;
    int quantizer;
    int speed;
    int tiles;
    int tile_rows;
    int tile_cols;
} librav1eContext;

static inline RaPixelRange range_map(enum AVPixelFormat pix_fmt, enum AVColorRange range)
{
    switch (pix_fmt) {
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUVJ422P:
    case AV_PIX_FMT_YUVJ444P:
        return RA_PIXEL_RANGE_FULL;
    }

    switch (range) {
    case AVCOL_RANGE_JPEG:
        return RA_PIXEL_RANGE_FULL;
    case AVCOL_RANGE_MPEG:
    default:
        return RA_PIXEL_RANGE_LIMITED;
    }
}
74 
75 static inline RaChromaSampling pix_fmt_map(enum AVPixelFormat pix_fmt)
76 {
77  switch (pix_fmt) {
78  case AV_PIX_FMT_YUV420P:
82  return RA_CHROMA_SAMPLING_CS420;
83  case AV_PIX_FMT_YUV422P:
87  return RA_CHROMA_SAMPLING_CS422;
88  case AV_PIX_FMT_YUV444P:
92  return RA_CHROMA_SAMPLING_CS444;
93  default:
94  av_assert0(0);
95  }
96 }
97 
98 static inline RaChromaSamplePosition chroma_loc_map(enum AVChromaLocation chroma_loc)
99 {
100  switch (chroma_loc) {
101  case AVCHROMA_LOC_LEFT:
102  return RA_CHROMA_SAMPLE_POSITION_VERTICAL;
104  return RA_CHROMA_SAMPLE_POSITION_COLOCATED;
105  default:
106  return RA_CHROMA_SAMPLE_POSITION_UNKNOWN;
107  }
108 }

static int get_stats(AVCodecContext *avctx, int eos)
{
    librav1eContext *ctx = avctx->priv_data;
    RaData* buf = rav1e_twopass_out(ctx->ctx);
    if (!buf)
        return 0;

    if (!eos) {
        uint8_t *tmp = av_fast_realloc(ctx->pass_data, &ctx->pass_size,
                                       ctx->pass_pos + buf->len);
        if (!tmp) {
            rav1e_data_unref(buf);
            return AVERROR(ENOMEM);
        }

        ctx->pass_data = tmp;
        memcpy(ctx->pass_data + ctx->pass_pos, buf->data, buf->len);
        ctx->pass_pos += buf->len;
    } else {
        size_t b64_size = AV_BASE64_SIZE(ctx->pass_pos);

        memcpy(ctx->pass_data, buf->data, buf->len);

        avctx->stats_out = av_malloc(b64_size);
        if (!avctx->stats_out) {
            rav1e_data_unref(buf);
            return AVERROR(ENOMEM);
        }

        av_base64_encode(avctx->stats_out, b64_size, ctx->pass_data, ctx->pass_pos);

        av_freep(&ctx->pass_data);
    }

    rav1e_data_unref(buf);

    return 0;
}

static int set_stats(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;
    int ret = 1;

    while (ret > 0 && ctx->pass_size - ctx->pass_pos > 0) {
        ret = rav1e_twopass_in(ctx->ctx, ctx->pass_data + ctx->pass_pos, ctx->pass_size);
        if (ret < 0)
            return AVERROR_EXTERNAL;
        ctx->pass_pos += ret;
    }

    return 0;
}
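
/*
 * Two-pass overview (editorial note, not upstream documentation): during the
 * first pass, get_stats() drains rav1e_twopass_out() into pass_data and, once
 * the encoder reports end of stream, base64-encodes the accumulated buffer
 * into avctx->stats_out. For the second pass, librav1e_encode_init()
 * base64-decodes avctx->stats_in into pass_data, and set_stats() feeds that
 * buffer back to the encoder through rav1e_twopass_in() as it is consumed.
 */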

static av_cold int librav1e_encode_close(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;

    if (ctx->ctx) {
        rav1e_context_unref(ctx->ctx);
        ctx->ctx = NULL;
    }
    if (ctx->rframe) {
        rav1e_frame_unref(ctx->rframe);
        ctx->rframe = NULL;
    }

    av_frame_free(&ctx->frame);
    av_bsf_free(&ctx->bsf);
    av_freep(&ctx->pass_data);

    return 0;
}

static av_cold int librav1e_encode_init(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    RaConfig *cfg = NULL;
    int rret;
    int ret = 0;

    ctx->frame = av_frame_alloc();
    if (!ctx->frame)
        return AVERROR(ENOMEM);

    cfg = rav1e_config_default();
    if (!cfg) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate rav1e config.\n");
        return AVERROR_EXTERNAL;
    }

    /*
     * Rav1e currently uses the time base given to it only for ratecontrol... where
     * the inverse is taken and used as a framerate. So, do what we do in other wrappers
     * and use the framerate if we can.
     */
    if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
        rav1e_config_set_time_base(cfg, (RaRational) {
                                   avctx->framerate.den, avctx->framerate.num
                                   });
    } else {
        rav1e_config_set_time_base(cfg, (RaRational) {
                                   avctx->time_base.num * avctx->ticks_per_frame,
                                   avctx->time_base.den
                                   });
    }
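
    /*
     * Illustrative example (not part of the upstream sources): for 30000/1001 fps
     * input, the branch above hands rav1e the inverted rational {1001, 30000};
     * rav1e inverts it again internally to recover the frame rate it uses for
     * rate control.
     */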

    if ((avctx->flags & AV_CODEC_FLAG_PASS1 || avctx->flags & AV_CODEC_FLAG_PASS2) && !avctx->bit_rate) {
        av_log(avctx, AV_LOG_ERROR, "A bitrate must be set to use two pass mode.\n");
        ret = AVERROR(EINVAL);
        goto end;
    }

    if (avctx->flags & AV_CODEC_FLAG_PASS2) {
        if (!avctx->stats_in) {
            av_log(avctx, AV_LOG_ERROR, "No stats file provided for second pass.\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        ctx->pass_size = (strlen(avctx->stats_in) * 3) / 4;
        ctx->pass_data = av_malloc(ctx->pass_size);
        if (!ctx->pass_data) {
            av_log(avctx, AV_LOG_ERROR, "Could not allocate stats buffer.\n");
            ret = AVERROR(ENOMEM);
            goto end;
        }

        ctx->pass_size = av_base64_decode(ctx->pass_data, avctx->stats_in, ctx->pass_size);
        if (ctx->pass_size < 0) {
            av_log(avctx, AV_LOG_ERROR, "Invalid pass file.\n");
            ret = AVERROR(EINVAL);
            goto end;
        }
    }

    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        const AVBitStreamFilter *filter = av_bsf_get_by_name("extract_extradata");
        int bret;

        if (!filter) {
            av_log(avctx, AV_LOG_ERROR, "extract_extradata bitstream filter "
                   "not found. This is a bug, please report it.\n");
            ret = AVERROR_BUG;
            goto end;
        }

        bret = av_bsf_alloc(filter, &ctx->bsf);
        if (bret < 0) {
            ret = bret;
            goto end;
        }

        bret = avcodec_parameters_from_context(ctx->bsf->par_in, avctx);
        if (bret < 0) {
            ret = bret;
            goto end;
        }

        bret = av_bsf_init(ctx->bsf);
        if (bret < 0) {
            ret = bret;
            goto end;
        }
    }

    {
        AVDictionaryEntry *en = NULL;
        while ((en = av_dict_get(ctx->rav1e_opts, "", en, AV_DICT_IGNORE_SUFFIX))) {
            int parse_ret = rav1e_config_parse(cfg, en->key, en->value);
            if (parse_ret < 0)
                av_log(avctx, AV_LOG_WARNING, "Invalid value for %s: %s.\n", en->key, en->value);
        }
    }
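
    /*
     * Illustrative example (hypothetical command line; the keys belong to rav1e's
     * own configuration parser and are not validated by this wrapper): with
     * "-rav1e-params low_latency=true:rdo_lookahead_frames=30" the options come in
     * as an AVDictionary, and the loop above forwards each key/value pair to
     * rav1e_config_parse(); rejected keys only produce a warning.
     */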

    rret = rav1e_config_parse_int(cfg, "width", avctx->width);
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid width passed to rav1e.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    rret = rav1e_config_parse_int(cfg, "height", avctx->height);
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid height passed to rav1e.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    rret = rav1e_config_parse_int(cfg, "threads", avctx->thread_count);
    if (rret < 0)
        av_log(avctx, AV_LOG_WARNING, "Invalid number of threads, defaulting to auto.\n");

    if (ctx->speed >= 0) {
        rret = rav1e_config_parse_int(cfg, "speed", ctx->speed);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set speed preset.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    /* rav1e handles precedence between 'tiles' and cols/rows for us. */
    if (ctx->tiles > 0) {
        rret = rav1e_config_parse_int(cfg, "tiles", ctx->tiles);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tiles to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }
    if (ctx->tile_rows > 0) {
        rret = rav1e_config_parse_int(cfg, "tile_rows", ctx->tile_rows);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tile rows to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }
    if (ctx->tile_cols > 0) {
        rret = rav1e_config_parse_int(cfg, "tile_cols", ctx->tile_cols);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tile cols to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->gop_size > 0) {
        rret = rav1e_config_parse_int(cfg, "key_frame_interval", avctx->gop_size);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set max keyint.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->keyint_min > 0) {
        rret = rav1e_config_parse_int(cfg, "min_key_frame_interval", avctx->keyint_min);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set min keyint.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->bit_rate && ctx->quantizer < 0) {
        int max_quantizer = avctx->qmax >= 0 ? avctx->qmax : 255;

        rret = rav1e_config_parse_int(cfg, "quantizer", max_quantizer);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set max quantizer.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }

        if (avctx->qmin >= 0) {
            rret = rav1e_config_parse_int(cfg, "min_quantizer", avctx->qmin);
            if (rret < 0) {
                av_log(avctx, AV_LOG_ERROR, "Could not set min quantizer.\n");
                ret = AVERROR_EXTERNAL;
                goto end;
            }
        }

        rret = rav1e_config_parse_int(cfg, "bitrate", avctx->bit_rate);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set bitrate.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    } else if (ctx->quantizer >= 0) {
        if (avctx->bit_rate)
            av_log(avctx, AV_LOG_WARNING, "Both bitrate and quantizer specified. Using quantizer mode.");

        rret = rav1e_config_parse_int(cfg, "quantizer", ctx->quantizer);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set quantizer.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }
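
    /*
     * Rate-control selection, summarized (editorial note): with a target bitrate
     * and no explicit qp, the chain above configures bitrate mode and maps
     * qmax/qmin onto rav1e's "quantizer"/"min_quantizer" bounds (qmax defaults
     * to 255); otherwise a non-negative qp selects constant-quantizer mode and
     * any bitrate is ignored with a warning.
     */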

    rret = rav1e_config_set_pixel_format(cfg, desc->comp[0].depth,
                                         pix_fmt_map(avctx->pix_fmt),
                                         chroma_loc_map(avctx->chroma_sample_location),
                                         range_map(avctx->pix_fmt, avctx->color_range));
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set pixel format properties.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    /* rav1e's colorspace enums match standard values. */
    rret = rav1e_config_set_color_description(cfg, (RaMatrixCoefficients) avctx->colorspace,
                                              (RaColorPrimaries) avctx->color_primaries,
                                              (RaTransferCharacteristics) avctx->color_trc);
    if (rret < 0) {
        av_log(avctx, AV_LOG_WARNING, "Failed to set color properties.\n");
        if (avctx->err_recognition & AV_EF_EXPLODE) {
            ret = AVERROR_INVALIDDATA;
            goto end;
        }
    }

    ctx->ctx = rav1e_context_new(cfg);
    if (!ctx->ctx) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create rav1e encode context.\n");
        ret = AVERROR_EXTERNAL;
        goto end;
    }

    ret = 0;

end:

    rav1e_config_unref(cfg);

    return ret;
}
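
/*
 * Editorial note (not upstream documentation): this implements the
 * receive_packet() encode API. It pulls a frame with ff_encode_get_frame(),
 * wraps it in a RaFrame (carrying the PTS through the frame's opaque pointer)
 * and submits it with rav1e_send_frame(); a full queue is reported as
 * ENOUGH_DATA and the RaFrame is kept for the next call. It then drives the
 * two-pass stats exchange and maps rav1e_receive_packet() statuses onto the
 * FFmpeg contract: NEED_MORE_DATA becomes AVERROR(EAGAIN), LIMIT_REACHED
 * becomes AVERROR_EOF, and a successful RaPacket is copied into the AVPacket,
 * optionally passing through the extract_extradata BSF when global headers
 * are requested.
 */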
static int librav1e_receive_packet(AVCodecContext *avctx, AVPacket *pkt)
{
    librav1eContext *ctx = avctx->priv_data;
    RaFrame *rframe = ctx->rframe;
    RaPacket *rpkt = NULL;
    int ret;

    if (!rframe) {
        AVFrame *frame = ctx->frame;

        ret = ff_encode_get_frame(avctx, frame);
        if (ret < 0 && ret != AVERROR_EOF)
            return ret;

        if (frame->buf[0]) {
            const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);

            int64_t *pts = av_malloc(sizeof(int64_t));
            if (!pts) {
                av_log(avctx, AV_LOG_ERROR, "Could not allocate PTS buffer.\n");
                return AVERROR(ENOMEM);
            }
            *pts = frame->pts;

            rframe = rav1e_frame_new(ctx->ctx);
            if (!rframe) {
                av_log(avctx, AV_LOG_ERROR, "Could not allocate new rav1e frame.\n");
                av_frame_unref(frame);
                av_freep(&pts);
                return AVERROR(ENOMEM);
            }

            for (int i = 0; i < desc->nb_components; i++) {
                int shift = i ? desc->log2_chroma_h : 0;
                int bytes = desc->comp[0].depth == 8 ? 1 : 2;
                rav1e_frame_fill_plane(rframe, i, frame->data[i],
                                       (frame->height >> shift) * frame->linesize[i],
                                       frame->linesize[i], bytes);
            }
            av_frame_unref(frame);
            rav1e_frame_set_opaque(rframe, pts, av_free);
        }
    }

    ret = rav1e_send_frame(ctx->ctx, rframe);
    if (rframe)
        if (ret == RA_ENCODER_STATUS_ENOUGH_DATA) {
            ctx->rframe = rframe; /* Queue is full. Store the RaFrame to retry next call */
        } else {
            rav1e_frame_unref(rframe); /* No need to unref if flushing. */
            ctx->rframe = NULL;
        }

    switch (ret) {
    case RA_ENCODER_STATUS_SUCCESS:
    case RA_ENCODER_STATUS_ENOUGH_DATA:
        break;
    case RA_ENCODER_STATUS_FAILURE:
        av_log(avctx, AV_LOG_ERROR, "Could not send frame: %s\n", rav1e_status_to_str(ret));
        return AVERROR_EXTERNAL;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_send_frame: %s\n", ret, rav1e_status_to_str(ret));
        return AVERROR_UNKNOWN;
    }

retry:

    if (avctx->flags & AV_CODEC_FLAG_PASS1) {
        int sret = get_stats(avctx, 0);
        if (sret < 0)
            return sret;
    } else if (avctx->flags & AV_CODEC_FLAG_PASS2) {
        int sret = set_stats(avctx);
        if (sret < 0)
            return sret;
    }

    ret = rav1e_receive_packet(ctx->ctx, &rpkt);
    switch (ret) {
    case RA_ENCODER_STATUS_SUCCESS:
        break;
    case RA_ENCODER_STATUS_LIMIT_REACHED:
        if (avctx->flags & AV_CODEC_FLAG_PASS1) {
            int sret = get_stats(avctx, 1);
            if (sret < 0)
                return sret;
        }
        return AVERROR_EOF;
    case RA_ENCODER_STATUS_ENCODED:
        goto retry;
    case RA_ENCODER_STATUS_NEED_MORE_DATA:
        if (avctx->internal->draining) {
            av_log(avctx, AV_LOG_ERROR, "Unexpected error when receiving packet after EOF.\n");
            return AVERROR_EXTERNAL;
        }
        return AVERROR(EAGAIN);
    case RA_ENCODER_STATUS_FAILURE:
        av_log(avctx, AV_LOG_ERROR, "Could not encode frame: %s\n", rav1e_status_to_str(ret));
        return AVERROR_EXTERNAL;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_receive_packet: %s\n", ret, rav1e_status_to_str(ret));
        return AVERROR_UNKNOWN;
    }

    ret = ff_get_encode_buffer(avctx, pkt, rpkt->len, 0);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate packet.\n");
        rav1e_packet_unref(rpkt);
        return ret;
    }

    memcpy(pkt->data, rpkt->data, rpkt->len);

    if (rpkt->frame_type == RA_FRAME_TYPE_KEY)
        pkt->flags |= AV_PKT_FLAG_KEY;

    pkt->pts = pkt->dts = *((int64_t *) rpkt->opaque);
    av_free(rpkt->opaque);
    rav1e_packet_unref(rpkt);

    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        int ret = av_bsf_send_packet(ctx->bsf, pkt);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "extradata extraction send failed.\n");
            av_packet_unref(pkt);
            return ret;
        }

        ret = av_bsf_receive_packet(ctx->bsf, pkt);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "extradata extraction receive failed.\n");
            av_packet_unref(pkt);
            return ret;
        }
    }

    return 0;
}

#define OFFSET(x) offsetof(librav1eContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM

static const AVOption options[] = {
    { "qp", "use constant quantizer mode", OFFSET(quantizer), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 255, VE },
    { "speed", "what speed preset to use", OFFSET(speed), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 10, VE },
    { "tiles", "number of tiles to encode with", OFFSET(tiles), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
    { "tile-rows", "number of tile rows to encode with", OFFSET(tile_rows), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
    { "tile-columns", "number of tile columns to encode with", OFFSET(tile_cols), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
    { "rav1e-params", "set the rav1e configuration using a :-separated list of key=value parameters", OFFSET(rav1e_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE },
    { NULL }
};
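
/*
 * Illustrative usage (hypothetical command line, assuming an FFmpeg build
 * configured with --enable-librav1e; file names are placeholders):
 *
 *   ffmpeg -i input.mkv -c:v librav1e -speed 6 -qp 100 -tile-columns 2 \
 *          -tile-rows 2 output.mkv
 *
 * "qp", "speed", "tile-rows" and "tile-columns" are the private options declared
 * in the table above; arbitrary rav1e settings can be passed with "rav1e-params".
 */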

static const AVCodecDefault librav1e_defaults[] = {
    { "b",          "0" },
    { "g",          "0" },
    { "keyint_min", "0" },
    { "qmax",       "-1" },
    { "qmin",       "-1" },
    { NULL }
};

enum AVPixelFormat librav1e_pix_fmts[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUVJ420P,
    AV_PIX_FMT_YUV420P10,
    AV_PIX_FMT_YUV420P12,
    AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUVJ422P,
    AV_PIX_FMT_YUV422P10,
    AV_PIX_FMT_YUV422P12,
    AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUVJ444P,
    AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV444P12,
    AV_PIX_FMT_NONE
};

static const AVClass class = {
    .class_name = "librav1e",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

const AVCodec ff_librav1e_encoder = {
    .name           = "librav1e",
    .long_name      = NULL_IF_CONFIG_SMALL("librav1e AV1"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_AV1,
    .init           = librav1e_encode_init,
    .receive_packet = librav1e_receive_packet,
    .close          = librav1e_encode_close,
    .priv_data_size = sizeof(librav1eContext),
    .priv_class     = &class,
    .defaults       = librav1e_defaults,
    .pix_fmts       = librav1e_pix_fmts,
    .capabilities   = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_DR1 | AV_CODEC_CAP_OTHER_THREADS,
    .caps_internal  = FF_CODEC_CAP_INIT_CLEANUP | FF_CODEC_CAP_AUTO_THREADS,
    .wrapper_name   = "librav1e",
};