FFmpeg
ffv1dec.c
1 /*
2  * FFV1 decoder
3  *
4  * Copyright (c) 2003-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * FF Video Codec 1 (a lossless codec) decoder
26  */
27 
28 #include "libavutil/avassert.h"
29 #include "libavutil/crc.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/imgutils.h"
32 #include "libavutil/pixdesc.h"
33 #include "avcodec.h"
34 #include "codec_internal.h"
35 #include "get_bits.h"
36 #include "rangecoder.h"
37 #include "golomb.h"
38 #include "mathops.h"
39 #include "ffv1.h"
40 #include "thread.h"
41 #include "threadframe.h"
42 
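/* Range-coder symbol reader: state[0] codes "value is zero", states 1..10 the
 * unary exponent, states 22..31 the mantissa bits and states 11..21 the sign
 * (only read when is_signed is set); exponents above 31 are rejected. */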
43 static inline av_flatten int get_symbol_inline(RangeCoder *c, uint8_t *state,
44  int is_signed)
45 {
46  if (get_rac(c, state + 0))
47  return 0;
48  else {
49  int i, e;
50  unsigned a;
51  e = 0;
52  while (get_rac(c, state + 1 + FFMIN(e, 9))) { // 1..10
53  e++;
54  if (e > 31)
55  return AVERROR_INVALIDDATA;
56  }
57 
58  a = 1;
59  for (i = e - 1; i >= 0; i--)
60  a += a + get_rac(c, state + 22 + FFMIN(i, 9)); // 22..31
61 
62  e = -(is_signed && get_rac(c, state + 11 + FFMIN(e, 10))); // 11..21
63  return (a ^ e) - e;
64  }
65 }
66 
67 static av_noinline int get_symbol(RangeCoder *c, uint8_t *state, int is_signed)
68 {
69  return get_symbol_inline(c, state, is_signed);
70 }
71 
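/* Golomb-Rice counterpart of get_symbol(): derive the Rice parameter k from the
 * per-context count/error_sum statistics, read a signed Golomb-Rice code, apply
 * the drift-based sign correction and bias, and fold the result into range. */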
72 static inline int get_vlc_symbol(GetBitContext *gb, VlcState *const state,
73  int bits)
74 {
75  int k, i, v, ret;
76 
77  i = state->count;
78  k = 0;
79  while (i < state->error_sum) { // FIXME: optimize
80  k++;
81  i += i;
82  }
83 
84  v = get_sr_golomb(gb, k, 12, bits);
85  ff_dlog(NULL, "v:%d bias:%d error:%d drift:%d count:%d k:%d",
86  v, state->bias, state->error_sum, state->drift, state->count, k);
87 
88  v ^= ((2 * state->drift + state->count) >> 31);
89 
90  ret = fold(v + state->bias, bits);
91 
 92  update_vlc_state(state, v);
 93 
94  return ret;
95 }
96 
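/* Check whether the slice input is exhausted: range-coder overread for the
 * arithmetic-coder paths, or no bits left for the Golomb-Rice bit reader. */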
 97 static int is_input_end(FFV1Context *s)
 98 {
99  if (s->ac != AC_GOLOMB_RICE) {
100  RangeCoder *const c = &s->c;
101  if (c->overread > MAX_OVERREAD)
102  return AVERROR_INVALIDDATA;
103  } else {
104  if (get_bits_left(&s->gb) < 1)
105  return AVERROR_INVALIDDATA;
106  }
107  return 0;
108 }
109 
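/* decode_line() and decode_rgb_frame() are instantiated twice from the
 * template: once with int16_t samples and once (names suffixed "32") with
 * int32_t samples for the use32bit path selected for 16-bit RGB content. */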
110 #define TYPE int16_t
111 #define RENAME(name) name
112 #include "ffv1dec_template.c"
113 #undef TYPE
114 #undef RENAME
115 
116 #define TYPE int32_t
117 #define RENAME(name) name ## 32
118 #include "ffv1dec_template.c"
119 
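/* Decode one plane (pixel_stride 2 packs two components into one plane):
 * two padded sample rows are kept so the context model can reference the
 * previous line; decoded samples are written back as 8- or 16-bit pixels. */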
120 static int decode_plane(FFV1Context *s, uint8_t *src,
121  int w, int h, int stride, int plane_index,
122  int pixel_stride)
123 {
124  int x, y;
125  int16_t *sample[2];
126  sample[0] = s->sample_buffer + 3;
127  sample[1] = s->sample_buffer + w + 6 + 3;
128 
129  s->run_index = 0;
130 
131  memset(s->sample_buffer, 0, 2 * (w + 6) * sizeof(*s->sample_buffer));
132 
133  for (y = 0; y < h; y++) {
134  int16_t *temp = sample[0]; // FIXME: try a normal buffer
135 
136  sample[0] = sample[1];
137  sample[1] = temp;
138 
139  sample[1][-1] = sample[0][0];
140  sample[0][w] = sample[0][w - 1];
141 
142  if (s->avctx->bits_per_raw_sample <= 8) {
143  int ret = decode_line(s, w, sample, plane_index, 8);
144  if (ret < 0)
145  return ret;
146  for (x = 0; x < w; x++)
147  src[x*pixel_stride + stride * y] = sample[1][x];
148  } else {
149  int ret = decode_line(s, w, sample, plane_index, s->avctx->bits_per_raw_sample);
150  if (ret < 0)
151  return ret;
152  if (s->packed_at_lsb) {
153  for (x = 0; x < w; x++) {
154  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x];
155  }
156  } else {
157  for (x = 0; x < w; x++) {
158  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x] << (16 - s->avctx->bits_per_raw_sample) | ((uint16_t **)sample)[1][x] >> (2 * s->avctx->bits_per_raw_sample - 16);
159  }
160  }
161  }
162  }
163  return 0;
164 }
165 
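/* Parse a per-slice header (version >= 3): slice position and size on the
 * slice grid, per-plane quant table index, picture structure, sample aspect
 * ratio and, for version >= 4, context reset, coding mode and RCT coefficients. */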
 166 static int decode_slice_header(const FFV1Context *f, FFV1Context *fs)
 167 {
168  RangeCoder *c = &fs->c;
169  uint8_t state[CONTEXT_SIZE];
170  unsigned ps, i, context_count;
171  memset(state, 128, sizeof(state));
172 
173  av_assert0(f->version > 2);
174 
175  fs->slice_x = get_symbol(c, state, 0) * f->width ;
176  fs->slice_y = get_symbol(c, state, 0) * f->height;
177  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
178  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
179 
180  fs->slice_x /= f->num_h_slices;
181  fs->slice_y /= f->num_v_slices;
182  fs->slice_width = fs->slice_width /f->num_h_slices - fs->slice_x;
183  fs->slice_height = fs->slice_height/f->num_v_slices - fs->slice_y;
184  if ((unsigned)fs->slice_width > f->width || (unsigned)fs->slice_height > f->height)
185  return -1;
186  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
187  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
188  return -1;
189 
190  for (i = 0; i < f->plane_count; i++) {
191  PlaneContext * const p = &fs->plane[i];
192  int idx = get_symbol(c, state, 0);
193  if (idx >= (unsigned)f->quant_table_count) {
194  av_log(f->avctx, AV_LOG_ERROR, "quant_table_index out of range\n");
195  return -1;
196  }
197  p->quant_table_index = idx;
198  memcpy(p->quant_table, f->quant_tables[idx], sizeof(p->quant_table));
199  context_count = f->context_count[idx];
200 
 201  if (p->context_count < context_count) {
 202  av_freep(&p->state);
 203  av_freep(&p->vlc_state);
 204  }
 205  p->context_count = context_count;
 206  }
207 
208  ps = get_symbol(c, state, 0);
209  if (ps == 1) {
210  f->cur->interlaced_frame = 1;
211  f->cur->top_field_first = 1;
212  } else if (ps == 2) {
213  f->cur->interlaced_frame = 1;
214  f->cur->top_field_first = 0;
215  } else if (ps == 3) {
216  f->cur->interlaced_frame = 0;
217  }
218  f->cur->sample_aspect_ratio.num = get_symbol(c, state, 0);
219  f->cur->sample_aspect_ratio.den = get_symbol(c, state, 0);
220 
221  if (av_image_check_sar(f->width, f->height,
222  f->cur->sample_aspect_ratio) < 0) {
223  av_log(f->avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
224  f->cur->sample_aspect_ratio.num,
225  f->cur->sample_aspect_ratio.den);
226  f->cur->sample_aspect_ratio = (AVRational){ 0, 1 };
227  }
228 
229  if (fs->version > 3) {
230  fs->slice_reset_contexts = get_rac(c, state);
231  fs->slice_coding_mode = get_symbol(c, state, 0);
232  if (fs->slice_coding_mode != 1) {
233  fs->slice_rct_by_coef = get_symbol(c, state, 0);
234  fs->slice_rct_ry_coef = get_symbol(c, state, 0);
235  if ((uint64_t)fs->slice_rct_by_coef + (uint64_t)fs->slice_rct_ry_coef > 4) {
236  av_log(f->avctx, AV_LOG_ERROR, "slice_rct_y_coef out of range\n");
237  return AVERROR_INVALIDDATA;
238  }
239  }
240  }
241 
242  return 0;
243 }
244 
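/* Per-slice worker, run through avctx->execute(): for inter frames it first
 * copies the per-plane context state from the previous frame's slice, then
 * parses the slice header (version >= 3), resets contexts on keyframes and
 * decodes the planes with either the range coder or the Golomb-Rice reader. */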
245 static int decode_slice(AVCodecContext *c, void *arg)
246 {
247  FFV1Context *fs = *(void **)arg;
248  FFV1Context *f = fs->avctx->priv_data;
249  int width, height, x, y, ret;
250  const int ps = av_pix_fmt_desc_get(c->pix_fmt)->comp[0].step;
251  AVFrame * const p = f->cur;
252  int i, si;
253 
254  for( si=0; fs != f->slice_context[si]; si ++)
255  ;
256 
257  if(f->fsrc && !p->key_frame)
258  ff_thread_await_progress(&f->last_picture, si, 0);
259 
260  if(f->fsrc && !p->key_frame) {
261  FFV1Context *fssrc = f->fsrc->slice_context[si];
262  FFV1Context *fsdst = f->slice_context[si];
263  av_assert1(fsdst->plane_count == fssrc->plane_count);
264  av_assert1(fsdst == fs);
265 
266  if (!p->key_frame)
267  fsdst->slice_damaged |= fssrc->slice_damaged;
268 
269  for (i = 0; i < f->plane_count; i++) {
270  PlaneContext *psrc = &fssrc->plane[i];
271  PlaneContext *pdst = &fsdst->plane[i];
272 
273  av_free(pdst->state);
274  av_free(pdst->vlc_state);
275  memcpy(pdst, psrc, sizeof(*pdst));
276  pdst->state = NULL;
277  pdst->vlc_state = NULL;
278 
279  if (fssrc->ac) {
 280  pdst->state = av_malloc_array(CONTEXT_SIZE, psrc->context_count);
 281  memcpy(pdst->state, psrc->state, CONTEXT_SIZE * psrc->context_count);
282  } else {
283  pdst->vlc_state = av_malloc_array(sizeof(*pdst->vlc_state), psrc->context_count);
284  memcpy(pdst->vlc_state, psrc->vlc_state, sizeof(*pdst->vlc_state) * psrc->context_count);
285  }
286  }
287  }
288 
289  fs->slice_rct_by_coef = 1;
290  fs->slice_rct_ry_coef = 1;
291 
292  if (f->version > 2) {
293  if (ff_ffv1_init_slice_state(f, fs) < 0)
294  return AVERROR(ENOMEM);
295  if (decode_slice_header(f, fs) < 0) {
296  fs->slice_x = fs->slice_y = fs->slice_height = fs->slice_width = 0;
297  fs->slice_damaged = 1;
298  return AVERROR_INVALIDDATA;
299  }
300  }
301  if ((ret = ff_ffv1_init_slice_state(f, fs)) < 0)
302  return ret;
303  if (f->cur->key_frame || fs->slice_reset_contexts)
 304  ff_ffv1_clear_slice_state(f, fs);
 305 
306  width = fs->slice_width;
307  height = fs->slice_height;
308  x = fs->slice_x;
309  y = fs->slice_y;
310 
311  if (fs->ac == AC_GOLOMB_RICE) {
312  if (f->version == 3 && f->micro_version > 1 || f->version > 3)
313  get_rac(&fs->c, (uint8_t[]) { 129 });
314  fs->ac_byte_count = f->version > 2 || (!x && !y) ? fs->c.bytestream - fs->c.bytestream_start - 1 : 0;
315  init_get_bits(&fs->gb,
316  fs->c.bytestream_start + fs->ac_byte_count,
317  (fs->c.bytestream_end - fs->c.bytestream_start - fs->ac_byte_count) * 8);
318  }
319 
320  av_assert1(width && height);
321  if (f->colorspace == 0 && (f->chroma_planes || !fs->transparency)) {
322  const int chroma_width = AV_CEIL_RSHIFT(width, f->chroma_h_shift);
323  const int chroma_height = AV_CEIL_RSHIFT(height, f->chroma_v_shift);
324  const int cx = x >> f->chroma_h_shift;
325  const int cy = y >> f->chroma_v_shift;
326  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0], width, height, p->linesize[0], 0, 1);
327 
328  if (f->chroma_planes) {
329  decode_plane(fs, p->data[1] + ps*cx+cy*p->linesize[1], chroma_width, chroma_height, p->linesize[1], 1, 1);
330  decode_plane(fs, p->data[2] + ps*cx+cy*p->linesize[2], chroma_width, chroma_height, p->linesize[2], 1, 1);
331  }
332  if (fs->transparency)
333  decode_plane(fs, p->data[3] + ps*x + y*p->linesize[3], width, height, p->linesize[3], (f->version >= 4 && !f->chroma_planes) ? 1 : 2, 1);
334  } else if (f->colorspace == 0) {
335  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] , width, height, p->linesize[0], 0, 2);
336  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] + 1, width, height, p->linesize[0], 1, 2);
337  } else if (f->use32bit) {
338  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
339  p->data[1] + ps * x + y * p->linesize[1],
340  p->data[2] + ps * x + y * p->linesize[2],
341  p->data[3] + ps * x + y * p->linesize[3] };
342  decode_rgb_frame32(fs, planes, width, height, p->linesize);
343  } else {
344  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
345  p->data[1] + ps * x + y * p->linesize[1],
346  p->data[2] + ps * x + y * p->linesize[2],
347  p->data[3] + ps * x + y * p->linesize[3] };
348  decode_rgb_frame(fs, planes, width, height, p->linesize);
349  }
350  if (fs->ac != AC_GOLOMB_RICE && f->version > 2) {
351  int v;
352  get_rac(&fs->c, (uint8_t[]) { 129 });
353  v = fs->c.bytestream_end - fs->c.bytestream - 2 - 5*f->ec;
354  if (v) {
355  av_log(f->avctx, AV_LOG_ERROR, "bytestream end mismatching by %d\n", v);
356  fs->slice_damaged = 1;
357  }
358  }
359 
360  emms_c();
361 
362  ff_thread_report_progress(&f->picture, si, 0);
363 
364  return 0;
365 }
366 
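/* Read one run-length coded quantizer sub-table: 128 forward entries plus the
 * mirrored negative half; returns the number of quantizer levels (2*v - 1). */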
367 static int read_quant_table(RangeCoder *c, int16_t *quant_table, int scale)
368 {
369  int v;
370  int i = 0;
371  uint8_t state[CONTEXT_SIZE];
372 
373  memset(state, 128, sizeof(state));
374 
375  for (v = 0; i < 128; v++) {
376  unsigned len = get_symbol(c, state, 0) + 1U;
377 
378  if (len > 128 - i || !len)
379  return AVERROR_INVALIDDATA;
380 
381  while (len--) {
382  quant_table[i] = scale * v;
383  i++;
384  }
385  }
386 
387  for (i = 1; i < 128; i++)
388  quant_table[256 - i] = -quant_table[i];
389  quant_table[128] = -quant_table[127];
390 
391  return 2 * v - 1;
392 }
393 
 394 static int read_quant_tables(RangeCoder *c,
 395  int16_t quant_table[MAX_CONTEXT_INPUTS][256])
396 {
397  int i;
398  int context_count = 1;
399 
400  for (i = 0; i < 5; i++) {
 401  int ret = read_quant_table(c, quant_table[i], i == 2 ? 11 : 13);
 402  if (ret < 0)
403  return ret;
404  context_count *= ret;
405  if (context_count > 32768U) {
406  return AVERROR_INVALIDDATA;
407  }
408  }
409  return (context_count + 1) / 2;
410 }
411 
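/* Parse the global header from extradata (version >= 2): version, coder type
 * and optional custom state-transition table, colorspace, bit depth, chroma
 * subsampling, transparency, slice grid, quant tables and initial states,
 * plus the ec/intra flags and a trailing CRC for version >= 3. */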
 412 static int read_extra_header(FFV1Context *f)
 413 {
414  RangeCoder *const c = &f->c;
415  uint8_t state[CONTEXT_SIZE];
416  int i, j, k, ret;
417  uint8_t state2[32][CONTEXT_SIZE];
418  unsigned crc = 0;
419 
420  memset(state2, 128, sizeof(state2));
421  memset(state, 128, sizeof(state));
422 
423  ff_init_range_decoder(c, f->avctx->extradata, f->avctx->extradata_size);
424  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
425 
426  f->version = get_symbol(c, state, 0);
427  if (f->version < 2) {
428  av_log(f->avctx, AV_LOG_ERROR, "Invalid version in global header\n");
429  return AVERROR_INVALIDDATA;
430  }
431  if (f->version > 2) {
432  c->bytestream_end -= 4;
433  f->micro_version = get_symbol(c, state, 0);
434  if (f->micro_version < 0)
435  return AVERROR_INVALIDDATA;
436  }
437  f->ac = get_symbol(c, state, 0);
438 
439  if (f->ac == AC_RANGE_CUSTOM_TAB) {
440  for (i = 1; i < 256; i++)
441  f->state_transition[i] = get_symbol(c, state, 1) + c->one_state[i];
442  }
443 
444  f->colorspace = get_symbol(c, state, 0); //YUV cs type
445  f->avctx->bits_per_raw_sample = get_symbol(c, state, 0);
446  f->chroma_planes = get_rac(c, state);
447  f->chroma_h_shift = get_symbol(c, state, 0);
448  f->chroma_v_shift = get_symbol(c, state, 0);
449  f->transparency = get_rac(c, state);
450  f->plane_count = 1 + (f->chroma_planes || f->version<4) + f->transparency;
451  f->num_h_slices = 1 + get_symbol(c, state, 0);
452  f->num_v_slices = 1 + get_symbol(c, state, 0);
453 
454  if (f->chroma_h_shift > 4U || f->chroma_v_shift > 4U) {
455  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
456  f->chroma_h_shift, f->chroma_v_shift);
457  return AVERROR_INVALIDDATA;
458  }
459 
460  if (f->num_h_slices > (unsigned)f->width || !f->num_h_slices ||
461  f->num_v_slices > (unsigned)f->height || !f->num_v_slices
462  ) {
463  av_log(f->avctx, AV_LOG_ERROR, "slice count invalid\n");
464  return AVERROR_INVALIDDATA;
465  }
466 
467  f->quant_table_count = get_symbol(c, state, 0);
468  if (f->quant_table_count > (unsigned)MAX_QUANT_TABLES || !f->quant_table_count) {
469  av_log(f->avctx, AV_LOG_ERROR, "quant table count %d is invalid\n", f->quant_table_count);
470  f->quant_table_count = 0;
471  return AVERROR_INVALIDDATA;
472  }
473 
474  for (i = 0; i < f->quant_table_count; i++) {
475  f->context_count[i] = read_quant_tables(c, f->quant_tables[i]);
476  if (f->context_count[i] < 0) {
477  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
478  return AVERROR_INVALIDDATA;
479  }
480  }
 481  if ((ret = ff_ffv1_allocate_initial_states(f)) < 0)
 482  return ret;
483 
484  for (i = 0; i < f->quant_table_count; i++)
485  if (get_rac(c, state)) {
486  for (j = 0; j < f->context_count[i]; j++)
487  for (k = 0; k < CONTEXT_SIZE; k++) {
488  int pred = j ? f->initial_states[i][j - 1][k] : 128;
489  f->initial_states[i][j][k] =
490  (pred + get_symbol(c, state2[k], 1)) & 0xFF;
491  }
492  }
493 
494  if (f->version > 2) {
495  f->ec = get_symbol(c, state, 0);
496  if (f->micro_version > 2)
497  f->intra = get_symbol(c, state, 0);
498  }
499 
500  if (f->version > 2) {
501  unsigned v;
 502  v = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0,
 503  f->avctx->extradata, f->avctx->extradata_size);
504  if (v || f->avctx->extradata_size < 4) {
505  av_log(f->avctx, AV_LOG_ERROR, "CRC mismatch %X!\n", v);
506  return AVERROR_INVALIDDATA;
507  }
508  crc = AV_RB32(f->avctx->extradata + f->avctx->extradata_size - 4);
509  }
510 
511  if (f->avctx->debug & FF_DEBUG_PICT_INFO)
512  av_log(f->avctx, AV_LOG_DEBUG,
513  "global: ver:%d.%d, coder:%d, colorspace: %d bpr:%d chroma:%d(%d:%d), alpha:%d slices:%dx%d qtabs:%d ec:%d intra:%d CRC:0x%08X\n",
514  f->version, f->micro_version,
515  f->ac,
516  f->colorspace,
517  f->avctx->bits_per_raw_sample,
518  f->chroma_planes, f->chroma_h_shift, f->chroma_v_shift,
519  f->transparency,
520  f->num_h_slices, f->num_v_slices,
521  f->quant_table_count,
522  f->ec,
523  f->intra,
524  crc
525  );
526  return 0;
527 }
528 
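/* Parse the per-frame header. Version 0/1 streams repeat the global
 * parameters here; all versions then map them to an AVPixelFormat, locate the
 * slices (explicit count for version 2, scanning the size/CRC trailers
 * backwards from the packet end for version >= 3) and set up the per-slice
 * plane contexts. */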
 529 static int read_header(FFV1Context *f)
 530 {
531  uint8_t state[CONTEXT_SIZE];
532  int i, j, context_count = -1; //-1 to avoid warning
533  RangeCoder *const c = &f->slice_context[0]->c;
534 
535  memset(state, 128, sizeof(state));
536 
537  if (f->version < 2) {
 538  int chroma_planes, chroma_h_shift, chroma_v_shift, transparency, colorspace, bits_per_raw_sample;
 539  unsigned v= get_symbol(c, state, 0);
540  if (v >= 2) {
541  av_log(f->avctx, AV_LOG_ERROR, "invalid version %d in ver01 header\n", v);
542  return AVERROR_INVALIDDATA;
543  }
544  f->version = v;
545  f->ac = get_symbol(c, state, 0);
546 
547  if (f->ac == AC_RANGE_CUSTOM_TAB) {
548  for (i = 1; i < 256; i++) {
549  int st = get_symbol(c, state, 1) + c->one_state[i];
550  if (st < 1 || st > 255) {
551  av_log(f->avctx, AV_LOG_ERROR, "invalid state transition %d\n", st);
552  return AVERROR_INVALIDDATA;
553  }
554  f->state_transition[i] = st;
555  }
556  }
557 
558  colorspace = get_symbol(c, state, 0); //YUV cs type
559  bits_per_raw_sample = f->version > 0 ? get_symbol(c, state, 0) : f->avctx->bits_per_raw_sample;
 560  chroma_planes = get_rac(c, state);
 561  chroma_h_shift = get_symbol(c, state, 0);
 562  chroma_v_shift = get_symbol(c, state, 0);
 563  transparency = get_rac(c, state);
 564  if (colorspace == 0 && f->avctx->skip_alpha)
565  transparency = 0;
566 
567  if (f->plane_count) {
568  if (colorspace != f->colorspace ||
569  bits_per_raw_sample != f->avctx->bits_per_raw_sample ||
570  chroma_planes != f->chroma_planes ||
571  chroma_h_shift != f->chroma_h_shift ||
572  chroma_v_shift != f->chroma_v_shift ||
573  transparency != f->transparency) {
574  av_log(f->avctx, AV_LOG_ERROR, "Invalid change of global parameters\n");
575  return AVERROR_INVALIDDATA;
576  }
577  }
578 
579  if (chroma_h_shift > 4U || chroma_v_shift > 4U) {
580  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
 581  chroma_h_shift, chroma_v_shift);
 582  return AVERROR_INVALIDDATA;
583  }
584 
585  f->colorspace = colorspace;
586  f->avctx->bits_per_raw_sample = bits_per_raw_sample;
587  f->chroma_planes = chroma_planes;
588  f->chroma_h_shift = chroma_h_shift;
589  f->chroma_v_shift = chroma_v_shift;
590  f->transparency = transparency;
591 
592  f->plane_count = 2 + f->transparency;
593  }
594 
595  if (f->colorspace == 0) {
596  if (!f->transparency && !f->chroma_planes) {
597  if (f->avctx->bits_per_raw_sample <= 8)
598  f->avctx->pix_fmt = AV_PIX_FMT_GRAY8;
599  else if (f->avctx->bits_per_raw_sample == 9) {
600  f->packed_at_lsb = 1;
601  f->avctx->pix_fmt = AV_PIX_FMT_GRAY9;
602  } else if (f->avctx->bits_per_raw_sample == 10) {
603  f->packed_at_lsb = 1;
604  f->avctx->pix_fmt = AV_PIX_FMT_GRAY10;
605  } else if (f->avctx->bits_per_raw_sample == 12) {
606  f->packed_at_lsb = 1;
607  f->avctx->pix_fmt = AV_PIX_FMT_GRAY12;
608  } else if (f->avctx->bits_per_raw_sample == 16) {
609  f->packed_at_lsb = 1;
610  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
611  } else if (f->avctx->bits_per_raw_sample < 16) {
612  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
613  } else
614  return AVERROR(ENOSYS);
615  } else if (f->transparency && !f->chroma_planes) {
616  if (f->avctx->bits_per_raw_sample <= 8)
617  f->avctx->pix_fmt = AV_PIX_FMT_YA8;
618  else
619  return AVERROR(ENOSYS);
620  } else if (f->avctx->bits_per_raw_sample<=8 && !f->transparency) {
621  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
622  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P; break;
623  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P; break;
624  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P; break;
625  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P; break;
626  case 0x20: f->avctx->pix_fmt = AV_PIX_FMT_YUV411P; break;
627  case 0x22: f->avctx->pix_fmt = AV_PIX_FMT_YUV410P; break;
628  }
629  } else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency) {
630  switch(16*f->chroma_h_shift + f->chroma_v_shift) {
631  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P; break;
632  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P; break;
633  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P; break;
634  }
635  } else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency) {
636  f->packed_at_lsb = 1;
637  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
638  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P9; break;
639  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P9; break;
640  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P9; break;
641  }
642  } else if (f->avctx->bits_per_raw_sample == 9 && f->transparency) {
643  f->packed_at_lsb = 1;
644  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
645  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P9; break;
646  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P9; break;
647  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P9; break;
648  }
649  } else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency) {
650  f->packed_at_lsb = 1;
651  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
652  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P10; break;
653  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P10; break;
654  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P10; break;
655  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P10; break;
656  }
657  } else if (f->avctx->bits_per_raw_sample == 10 && f->transparency) {
658  f->packed_at_lsb = 1;
659  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
660  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P10; break;
661  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P10; break;
662  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P10; break;
663  }
664  } else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency) {
665  f->packed_at_lsb = 1;
666  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
667  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P12; break;
668  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P12; break;
669  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P12; break;
670  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P12; break;
671  }
672  } else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency) {
673  f->packed_at_lsb = 1;
674  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
675  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P14; break;
676  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P14; break;
677  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P14; break;
678  }
679  } else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency){
680  f->packed_at_lsb = 1;
681  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
682  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P16; break;
683  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P16; break;
684  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P16; break;
685  }
686  } else if (f->avctx->bits_per_raw_sample == 16 && f->transparency){
687  f->packed_at_lsb = 1;
688  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
689  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P16; break;
690  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P16; break;
691  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P16; break;
692  }
693  }
694  } else if (f->colorspace == 1) {
695  if (f->chroma_h_shift || f->chroma_v_shift) {
696  av_log(f->avctx, AV_LOG_ERROR,
697  "chroma subsampling not supported in this colorspace\n");
698  return AVERROR(ENOSYS);
699  }
700  if ( f->avctx->bits_per_raw_sample <= 8 && !f->transparency)
701  f->avctx->pix_fmt = AV_PIX_FMT_0RGB32;
702  else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency)
703  f->avctx->pix_fmt = AV_PIX_FMT_RGB32;
704  else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency)
705  f->avctx->pix_fmt = AV_PIX_FMT_GBRP9;
706  else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency)
707  f->avctx->pix_fmt = AV_PIX_FMT_GBRP10;
708  else if (f->avctx->bits_per_raw_sample == 10 && f->transparency)
709  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
710  else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency)
711  f->avctx->pix_fmt = AV_PIX_FMT_GBRP12;
712  else if (f->avctx->bits_per_raw_sample == 12 && f->transparency)
713  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP12;
714  else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency)
715  f->avctx->pix_fmt = AV_PIX_FMT_GBRP14;
716  else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency) {
717  f->avctx->pix_fmt = AV_PIX_FMT_GBRP16;
718  f->use32bit = 1;
719  }
720  else if (f->avctx->bits_per_raw_sample == 16 && f->transparency) {
721  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP16;
722  f->use32bit = 1;
723  }
724  } else {
725  av_log(f->avctx, AV_LOG_ERROR, "colorspace not supported\n");
726  return AVERROR(ENOSYS);
727  }
728  if (f->avctx->pix_fmt == AV_PIX_FMT_NONE) {
729  av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
730  return AVERROR(ENOSYS);
731  }
732 
733  ff_dlog(f->avctx, "%d %d %d\n",
734  f->chroma_h_shift, f->chroma_v_shift, f->avctx->pix_fmt);
735  if (f->version < 2) {
736  context_count = read_quant_tables(c, f->quant_table);
737  if (context_count < 0) {
738  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
739  return AVERROR_INVALIDDATA;
740  }
741  f->slice_count = f->max_slice_count;
742  } else if (f->version < 3) {
743  f->slice_count = get_symbol(c, state, 0);
744  } else {
745  const uint8_t *p = c->bytestream_end;
746  for (f->slice_count = 0;
747  f->slice_count < MAX_SLICES && 3 + 5*!!f->ec < p - c->bytestream_start;
748  f->slice_count++) {
749  int trailer = 3 + 5*!!f->ec;
750  int size = AV_RB24(p-trailer);
751  if (size + trailer > p - c->bytestream_start)
752  break;
753  p -= size + trailer;
754  }
755  }
756  if (f->slice_count > (unsigned)MAX_SLICES || f->slice_count <= 0 || f->slice_count > f->max_slice_count) {
757  av_log(f->avctx, AV_LOG_ERROR, "slice count %d is invalid (max=%d)\n", f->slice_count, f->max_slice_count);
758  return AVERROR_INVALIDDATA;
759  }
760 
761  for (j = 0; j < f->slice_count; j++) {
762  FFV1Context *fs = f->slice_context[j];
763  fs->ac = f->ac;
764  fs->packed_at_lsb = f->packed_at_lsb;
765 
766  fs->slice_damaged = 0;
767 
768  if (f->version == 2) {
769  fs->slice_x = get_symbol(c, state, 0) * f->width ;
770  fs->slice_y = get_symbol(c, state, 0) * f->height;
771  fs->slice_width = (get_symbol(c, state, 0) + 1) * f->width + fs->slice_x;
772  fs->slice_height = (get_symbol(c, state, 0) + 1) * f->height + fs->slice_y;
773 
774  fs->slice_x /= f->num_h_slices;
775  fs->slice_y /= f->num_v_slices;
776  fs->slice_width = fs->slice_width / f->num_h_slices - fs->slice_x;
777  fs->slice_height = fs->slice_height / f->num_v_slices - fs->slice_y;
778  if ((unsigned)fs->slice_width > f->width ||
779  (unsigned)fs->slice_height > f->height)
780  return AVERROR_INVALIDDATA;
781  if ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width > f->width
782  || (unsigned)fs->slice_y + (uint64_t)fs->slice_height > f->height)
783  return AVERROR_INVALIDDATA;
784  }
785 
786  for (i = 0; i < f->plane_count; i++) {
787  PlaneContext *const p = &fs->plane[i];
788 
789  if (f->version == 2) {
790  int idx = get_symbol(c, state, 0);
791  if (idx >= (unsigned)f->quant_table_count) {
792  av_log(f->avctx, AV_LOG_ERROR,
793  "quant_table_index out of range\n");
794  return AVERROR_INVALIDDATA;
795  }
796  p->quant_table_index = idx;
797  memcpy(p->quant_table, f->quant_tables[idx],
798  sizeof(p->quant_table));
799  context_count = f->context_count[idx];
800  } else {
801  memcpy(p->quant_table, f->quant_table, sizeof(p->quant_table));
802  }
803 
804  if (f->version <= 2) {
 805  av_assert0(context_count >= 0);
 806  if (p->context_count < context_count) {
807  av_freep(&p->state);
808  av_freep(&p->vlc_state);
809  }
 810  p->context_count = context_count;
 811  }
812  }
813  }
814  return 0;
815 }
816 
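/* Decoder init: common FFV1 context setup, optional global-header parsing
 * from extradata, then allocation of the per-slice contexts. */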
 817 static av_cold int decode_init(AVCodecContext *avctx)
 818 {
 819  FFV1Context *f = avctx->priv_data;
820  int ret;
821 
822  if ((ret = ff_ffv1_common_init(avctx)) < 0)
823  return ret;
824 
825  if (avctx->extradata_size > 0 && (ret = read_extra_header(f)) < 0)
826  return ret;
827 
828  if ((ret = ff_ffv1_init_slice_contexts(f)) < 0)
829  return ret;
830 
831  return 0;
832 }
833 
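/* Top-level decode callback: swap picture/last_picture, read the keyframe bit
 * and (for keyframes) the frame header, split the packet into slices by
 * walking the size/CRC trailers backwards, decode all slices in parallel and
 * conceal damaged slices with data from the previous frame. */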
 834 static int decode_frame(AVCodecContext *avctx, AVFrame *rframe,
 835  int *got_frame, AVPacket *avpkt)
836 {
837  uint8_t *buf = avpkt->data;
838  int buf_size = avpkt->size;
 839  FFV1Context *f = avctx->priv_data;
 840  RangeCoder *const c = &f->slice_context[0]->c;
841  int i, ret;
842  uint8_t keystate = 128;
843  uint8_t *buf_p;
844  AVFrame *p;
845 
846  if (f->last_picture.f)
847  ff_thread_release_ext_buffer(avctx, &f->last_picture);
848  FFSWAP(ThreadFrame, f->picture, f->last_picture);
849 
850  f->cur = p = f->picture.f;
851 
852  if (f->version < 3 && avctx->field_order > AV_FIELD_PROGRESSIVE) {
853  /* we have interlaced material flagged in container */
854  p->interlaced_frame = 1;
 855  if (avctx->field_order == AV_FIELD_TT || avctx->field_order == AV_FIELD_TB)
 856  p->top_field_first = 1;
857  }
858 
859  f->avctx = avctx;
860  ff_init_range_decoder(c, buf, buf_size);
861  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
862 
863  p->pict_type = AV_PICTURE_TYPE_I; //FIXME I vs. P
864  if (get_rac(c, &keystate)) {
865  p->key_frame = 1;
866  f->key_frame_ok = 0;
867  if ((ret = read_header(f)) < 0)
868  return ret;
869  f->key_frame_ok = 1;
870  } else {
871  if (!f->key_frame_ok) {
 872  av_log(avctx, AV_LOG_ERROR,
 873  "Cannot decode non-keyframe without valid keyframe\n");
874  return AVERROR_INVALIDDATA;
875  }
876  p->key_frame = 0;
877  }
878 
 879  ret = ff_thread_get_ext_buffer(avctx, &f->picture, AV_GET_BUFFER_FLAG_REF);
 880  if (ret < 0)
881  return ret;
882 
 883  if (avctx->debug & FF_DEBUG_PICT_INFO)
 884  av_log(avctx, AV_LOG_DEBUG, "ver:%d keyframe:%d coder:%d ec:%d slices:%d bps:%d\n",
885  f->version, p->key_frame, f->ac, f->ec, f->slice_count, f->avctx->bits_per_raw_sample);
886 
 887  ff_thread_finish_setup(avctx);
 888 
889  buf_p = buf + buf_size;
890  for (i = f->slice_count - 1; i >= 0; i--) {
891  FFV1Context *fs = f->slice_context[i];
892  int trailer = 3 + 5*!!f->ec;
893  int v;
894 
895  if (i || f->version > 2) {
896  if (trailer > buf_p - buf) v = INT_MAX;
897  else v = AV_RB24(buf_p-trailer) + trailer;
898  } else v = buf_p - c->bytestream_start;
899  if (buf_p - c->bytestream_start < v) {
900  av_log(avctx, AV_LOG_ERROR, "Slice pointer chain broken\n");
901  ff_thread_report_progress(&f->picture, INT_MAX, 0);
902  return AVERROR_INVALIDDATA;
903  }
904  buf_p -= v;
905 
906  if (f->ec) {
907  unsigned crc = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0, buf_p, v);
908  if (crc) {
909  int64_t ts = avpkt->pts != AV_NOPTS_VALUE ? avpkt->pts : avpkt->dts;
910  av_log(f->avctx, AV_LOG_ERROR, "slice CRC mismatch %X!", crc);
911  if (ts != AV_NOPTS_VALUE && avctx->pkt_timebase.num) {
912  av_log(f->avctx, AV_LOG_ERROR, "at %f seconds\n", ts*av_q2d(avctx->pkt_timebase));
913  } else if (ts != AV_NOPTS_VALUE) {
914  av_log(f->avctx, AV_LOG_ERROR, "at %"PRId64"\n", ts);
915  } else {
916  av_log(f->avctx, AV_LOG_ERROR, "\n");
917  }
918  fs->slice_damaged = 1;
919  }
920  if (avctx->debug & FF_DEBUG_PICT_INFO) {
921  av_log(avctx, AV_LOG_DEBUG, "slice %d, CRC: 0x%08"PRIX32"\n", i, AV_RB32(buf_p + v - 4));
922  }
923  }
924 
925  if (i) {
926  ff_init_range_decoder(&fs->c, buf_p, v);
927  } else
928  fs->c.bytestream_end = buf_p + v;
929 
930  fs->avctx = avctx;
931  }
932 
 933  avctx->execute(avctx,
 934  decode_slice,
935  &f->slice_context[0],
936  NULL,
937  f->slice_count,
938  sizeof(void*));
939 
940  for (i = f->slice_count - 1; i >= 0; i--) {
941  FFV1Context *fs = f->slice_context[i];
942  int j;
943  if (fs->slice_damaged && f->last_picture.f->data[0]) {
 944  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
 945  const uint8_t *src[4];
946  uint8_t *dst[4];
947  ff_thread_await_progress(&f->last_picture, INT_MAX, 0);
948  for (j = 0; j < desc->nb_components; j++) {
949  int pixshift = desc->comp[j].depth > 8;
950  int sh = (j == 1 || j == 2) ? f->chroma_h_shift : 0;
951  int sv = (j == 1 || j == 2) ? f->chroma_v_shift : 0;
952  dst[j] = p->data[j] + p->linesize[j] *
953  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
954  src[j] = f->last_picture.f->data[j] + f->last_picture.f->linesize[j] *
955  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
956 
957  }
958  if (desc->flags & AV_PIX_FMT_FLAG_PAL) {
959  dst[1] = p->data[1];
960  src[1] = f->last_picture.f->data[1];
961  }
962  av_image_copy(dst, p->linesize, src,
963  f->last_picture.f->linesize,
964  avctx->pix_fmt,
965  fs->slice_width,
966  fs->slice_height);
967  }
968  }
969  ff_thread_report_progress(&f->picture, INT_MAX, 0);
970 
971  if (f->last_picture.f)
972  ff_thread_release_ext_buffer(avctx, &f->last_picture);
973  if ((ret = av_frame_ref(rframe, f->picture.f)) < 0)
974  return ret;
975 
976  *got_frame = 1;
977 
978  return buf_size;
979 }
980 
981 static void copy_fields(FFV1Context *fsdst, const FFV1Context *fssrc,
982  const FFV1Context *fsrc)
983 {
984  fsdst->version = fsrc->version;
985  fsdst->micro_version = fsrc->micro_version;
986  fsdst->chroma_planes = fsrc->chroma_planes;
 987  fsdst->chroma_h_shift = fsrc->chroma_h_shift;
 988  fsdst->chroma_v_shift = fsrc->chroma_v_shift;
 989  fsdst->transparency = fsrc->transparency;
990  fsdst->plane_count = fsrc->plane_count;
991  fsdst->ac = fsrc->ac;
992  fsdst->colorspace = fsrc->colorspace;
993 
994  fsdst->ec = fsrc->ec;
995  fsdst->intra = fsrc->intra;
996  fsdst->slice_damaged = fssrc->slice_damaged;
997  fsdst->key_frame_ok = fsrc->key_frame_ok;
998 
999  fsdst->packed_at_lsb = fsrc->packed_at_lsb;
1000  fsdst->slice_count = fsrc->slice_count;
1001  if (fsrc->version<3){
1002  fsdst->slice_x = fssrc->slice_x;
1003  fsdst->slice_y = fssrc->slice_y;
1004  fsdst->slice_width = fssrc->slice_width;
1005  fsdst->slice_height = fssrc->slice_height;
1006  }
1007 }
1008 
1009 #if HAVE_THREADS
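/* Frame-threading context transfer: copy the global decoder state from the
 * source thread while keeping this thread's own frames, slice contexts and
 * initial-state buffers, then re-reference the source thread's picture. */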
1010 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1011 {
1012  FFV1Context *fsrc = src->priv_data;
1013  FFV1Context *fdst = dst->priv_data;
1014  int i, ret;
1015 
1016  if (dst == src)
1017  return 0;
1018 
1019  {
 1020  ThreadFrame picture = fdst->picture, last_picture = fdst->last_picture;
 1021  uint8_t (*initial_states[MAX_QUANT_TABLES])[32];
 1022  struct FFV1Context *slice_context[MAX_SLICES];
1023  memcpy(initial_states, fdst->initial_states, sizeof(fdst->initial_states));
1024  memcpy(slice_context, fdst->slice_context , sizeof(fdst->slice_context));
1025 
1026  memcpy(fdst, fsrc, sizeof(*fdst));
1027  memcpy(fdst->initial_states, initial_states, sizeof(fdst->initial_states));
1028  memcpy(fdst->slice_context, slice_context , sizeof(fdst->slice_context));
1029  fdst->picture = picture;
1030  fdst->last_picture = last_picture;
1031  for (i = 0; i<fdst->num_h_slices * fdst->num_v_slices; i++) {
1032  FFV1Context *fssrc = fsrc->slice_context[i];
1033  FFV1Context *fsdst = fdst->slice_context[i];
1034  copy_fields(fsdst, fssrc, fsrc);
1035  }
1036  av_assert0(!fdst->plane[0].state);
1037  av_assert0(!fdst->sample_buffer);
1038  }
1039 
 1040  av_assert1(fdst->max_slice_count == fsrc->max_slice_count);
 1041 
1042 
 1043  ff_thread_release_ext_buffer(dst, &fdst->picture);
 1044  if (fsrc->picture.f->data[0]) {
1045  if ((ret = ff_thread_ref_frame(&fdst->picture, &fsrc->picture)) < 0)
1046  return ret;
1047  }
1048 
1049  fdst->fsrc = fsrc;
1050 
1051  return 0;
1052 }
1053 #endif
1054 
 1055 const FFCodec ff_ffv1_decoder = {
 1056  .p.name = "ffv1",
1057  .p.long_name = NULL_IF_CONFIG_SMALL("FFmpeg video codec #1"),
1058  .p.type = AVMEDIA_TYPE_VIDEO,
1059  .p.id = AV_CODEC_ID_FFV1,
1060  .priv_data_size = sizeof(FFV1Context),
1061  .init = decode_init,
1062  .close = ff_ffv1_close,
 1063  FF_CODEC_DECODE_CB(decode_frame),
 1064  .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1065  .p.capabilities = AV_CODEC_CAP_DR1 /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/ |
 1066  AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_SLICE_THREADS,
 1067  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP |
 1068  FF_CODEC_CAP_ALLOCATE_PROGRESS,
 1069 };